I'm trying to create a Guix package for the xsv crate. I use the output of guix import crate xsv and add the necessary use-modules lines, which gives the following rust-xsv.scm file:
(use-modules (guix packages)
(guix download)
(guix build-system cargo)
((guix licenses) #:prefix license:))
;; (define-public rust-xsv-0.13
(package
(name "rust-xsv")
(version "0.13.0")
(source (origin
(method url-fetch)
(uri (crate-uri "xsv" version))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"0pvzr7x5phlya6m5yikvy13vgbazshw0plysckz9zmf2ly5x4jl8"))))
(build-system cargo-build-system)
(arguments
`(#:cargo-inputs (("rust-byteorder" ,rust-byteorder-1)
("rust-chan" ,rust-chan-0.1)
("rust-csv" ,rust-csv-1)
("rust-csv-index" ,rust-csv-index-0.1)
("rust-docopt" ,rust-docopt-1)
("rust-filetime" ,rust-filetime-0.1)
("rust-num-cpus" ,rust-num-cpus-1)
("rust-rand" ,rust-rand-0.4)
("rust-regex" ,rust-regex-1)
("rust-serde" ,rust-serde-1)
("rust-serde-derive" ,rust-serde-derive-1)
("rust-streaming-stats" ,rust-streaming-stats-0.2)
("rust-tabwriter" ,rust-tabwriter-1)
("rust-threadpool" ,rust-threadpool-1))
#:cargo-development-inputs (("rust-log" ,rust-log-0.4)
("rust-quickcheck" ,rust-quickcheck-0.6))))
(home-page "https://github.com/BurntSushi/xsv")
(synopsis "A high performance CSV command line toolkit.")
(description
"This package provides a high performance CSV command line toolkit.")
(license (list license:unlicense license:expat)))
;; )
When I try to build this package with guix package --install-from-file=./rust-xsv.scm I get the error rust-byteorder-1: unbound variable; in other words, it doesn't find the cargo-input dependencies. What am I missing?
Here is the complete trace:
The following package will be installed:
rust-xsv 0.13.0
Backtrace:
In guix/ui.scm:
2263:7 19 (run-guix . _)
2226:10 18 (run-guix-command _ . _)
In ice-9/boot-9.scm:
1752:10 17 (with-exception-handler _ _ #:unwind? _ # _)
In guix/status.scm:
835:3 16 (_)
815:4 15 (call-with-status-report _ _)
In guix/store.scm:
1298:8 14 (call-with-build-handler #<procedure 7f93512c2b70 at g…> …)
In guix/build/syscalls.scm:
1428:3 13 (_)
1395:4 12 (call-with-file-lock/no-wait _ #<procedure 7f934238684…> …)
In guix/scripts/package.scm:
153:19 11 (build-and-use-profile #<store-connection 256.99 7f935…> …)
In guix/store.scm:
2168:25 10 (run-with-store #<store-connection 256.99 7f93511bf640> …)
In guix/profiles.scm:
666:3 9 (_ _)
In srfi/srfi-1.scm:
586:17 8 (map1 (#<<manifest-entry> name: "rust-xsv" version: …> …))
In guix/profiles.scm:
1929:19 7 (_ _)
In guix/packages.scm:
1279:17 6 (supported-package? #<package rust-xsv#0.13.0 /home/he…> …)
In guix/memoization.scm:
101:0 5 (_ #<hash-table 7f93429e7f00 0/31> #<package rust-xsv#…> …)
In guix/packages.scm:
1257:37 4 (_)
1517:16 3 (package->bag _ _ _ #:graft? _)
1622:43 2 (thunk)
In /home/henri/travaux/www/orchestraGuix/rust-xsv.scm:
19:42 1 (arguments #<package rust-xsv#0.13.0 /home/henri/travau…>)
In ice-9/boot-9.scm:
1685:16 0 (raise-exception _ #:continuable? _)
ice-9/boot-9.scm:1685:16: In procedure raise-exception:
error: rust-byteorder-1: unbound variable
I was missing two things:
an import of (gnu packages crates-io), which gives access to the crates already packaged in Guix (like rust-byteorder);
the -r (recursive) option in the initial command, guix import crate -r xsv, which also emits definitions for the crates not yet packaged in Guix.
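For reference, here is a sketch of the corrected workflow (redirecting the importer output to the file is just one way to capture it; the use-modules header still has to be added by hand, as described above):
# Re-import recursively so definitions for not-yet-packaged crates are emitted too,
# then add the use-modules form (including (gnu packages crates-io)) at the top of the file.
guix import crate -r xsv > rust-xsv.scm
# Build and install from the fixed file.
guix package --install-from-file=./rust-xsv.scm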
Here is the fixed file:
(use-modules (guix packages)
(guix download)
(guix build-system cargo)
(gnu packages crates-io)
((guix licenses) #:prefix license:))
(define-public rust-filetime-0.1
(package
(name "rust-filetime")
(version "0.1.15")
(source (origin
(method url-fetch)
(uri (crate-uri "filetime" version))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"03xishfxzpr4nfz4g3r218d6b6g94rxsqw9pw96m6wa8wgrm6iki"))))
(build-system cargo-build-system)
(arguments
`(#:skip-build? #t
#:cargo-inputs (("rust-cfg-if" ,rust-cfg-if-0.1)
("rust-libc" ,rust-libc-0.2)
("rust-redox-syscall" ,rust-redox-syscall-0.1))))
(home-page "https://github.com/alexcrichton/filetime")
(synopsis "Platform-agnostic accessors of timestamps in File metadata
")
(description "Platform-agnostic accessors of timestamps in File metadata")
(license (list license:expat license:asl2.0))))
(define-public rust-csv-index-0.1
(package
(name "rust-csv-index")
(version "0.1.6")
(source (origin
(method url-fetch)
(uri (crate-uri "csv-index" version))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"01048y84y0bakqm0x4y1svjv6lzc753b9q598xp7xgcqrdgi6x7j"))))
(build-system cargo-build-system)
(arguments
`(#:skip-build? #t
#:cargo-inputs (("rust-byteorder" ,rust-byteorder-1)
("rust-csv" ,rust-csv-1))))
(home-page "https://github.com/BurntSushi/rust-csv")
(synopsis "On disk CSV indexing data structures.")
(description "On disk CSV indexing data structures.")
(license (list license:unlicense license:expat))))
(define-public rust-chan-0.1
(package
(name "rust-chan")
(version "0.1.23")
(source (origin
(method url-fetch)
(uri (crate-uri "chan" version))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"1n0y992mqfk5zpxzvrv14g9qivacmd4fiv4j1nmgyrg0vaimcjfi"))))
(build-system cargo-build-system)
(arguments
`(#:skip-build? #t
#:cargo-inputs (("rust-rand" ,rust-rand-0.3))))
(home-page "https://github.com/BurntSushi/chan")
(synopsis "DEPRECATED. Use crossbeam-channel instead.")
(description "DEPRECATED. Use crossbeam-channel instead.")
(license (list license:unlicense license:expat))))
;; (define-public rust-xsv-0.13
(package
(name "rust-xsv")
(version "0.13.0")
(source (origin
(method url-fetch)
(uri (crate-uri "xsv" version))
(file-name (string-append name "-" version ".tar.gz"))
(sha256
(base32
"0pvzr7x5phlya6m5yikvy13vgbazshw0plysckz9zmf2ly5x4jl8"))))
(build-system cargo-build-system)
(arguments
`(#:cargo-inputs (("rust-byteorder" ,rust-byteorder-1)
("rust-chan" ,rust-chan-0.1)
("rust-csv" ,rust-csv-1)
("rust-csv-index" ,rust-csv-index-0.1)
("rust-docopt" ,rust-docopt-1)
("rust-filetime" ,rust-filetime-0.1)
("rust-num-cpus" ,rust-num-cpus-1)
("rust-rand" ,rust-rand-0.4)
("rust-regex" ,rust-regex-1)
("rust-serde" ,rust-serde-1)
("rust-serde-derive" ,rust-serde-derive-1)
("rust-streaming-stats" ,rust-streaming-stats-0.2)
("rust-tabwriter" ,rust-tabwriter-1)
("rust-threadpool" ,rust-threadpool-1))
#:cargo-development-inputs (("rust-log" ,rust-log-0.4)
("rust-quickcheck" ,rust-quickcheck-0.6))))
(home-page "https://github.com/BurntSushi/xsv")
(synopsis "A high performance CSV command line toolkit.")
(description
"This package provides a high performance CSV command line toolkit.")
(license (list license:unlicense license:expat)))
;; )
When using cabal to build a Haskell package, it appears to mark some packages as legacy fallback:
$ cabal build
Resolving dependencies...
Build profile: -w ghc-9.0.1 -O1
In order, the following will be built (use -v for more details):
- appar-0.1.8 (lib:appar) (requires build)
- auto-update-0.1.6 (lib) (requires build)
- base-compat-0.11.2 (lib) (requires build)
...
Building base-orphans-0.8.4 (lib)
Building appar-0.1.8 (all, legacy fallback)
Downloaded memory-0.16.0
Downloading cryptonite-0.29
Installing base-orphans-0.8.4 (lib)
Downloaded cryptonite-0.29
Downloading some-1.0.3
...
You can see that some libraries are specifically marked (lib), while others are marked (all, legacy fallback).
What is the difference between these? What does legacy fallback mean?
I am using cabal-install version 3.4.0.0:
$ cabal --version
cabal-install version 3.4.0.0
compiled using version 3.4.0.0 of the Cabal library
I took a dive into the source code. The message comes from here:
dispname = case elabPkgOrComp pkg of
    ElabPackage _ -> prettyShow pkgid
        ++ " (all, legacy fallback)"
    ElabComponent comp -> prettyShow pkgid
        ++ " (" ++ maybe "custom" prettyShow (compComponentName comp) ++ ")"
So I started looking for places where ElabPackage is constructed. I found this:
elaborateSolverToPackage
  ...
  where
    ...
    elab1 = elab0 {
        elabUnitId = newSimpleUnitId pkgInstalledId,
        elabComponentId = pkgInstalledId,
        elabLinkedInstantiatedWith = Map.empty,
        elabPkgOrComp = ElabPackage $ ElaboratedPackage {..},
        elabModuleShape = modShape
      }
This in turn is used here:
elaborateSolverToComponents mapDep spkg@(SolverPackage _ _ _ deps0 exe_deps0)
    = case mkComponentsGraph (elabEnabledSpec elab0) pd of
        Right g -> do
          ...
          let not_per_component_reasons = why_not_per_component src_comps
          if null not_per_component_reasons
            then return comps
            else do checkPerPackageOk comps not_per_component_reasons
                    return [elaborateSolverToPackage spkg g $
                            comps ++ maybeToList setupComponent]
Now why_not_per_component is very interesting, as that function determines when the legacy fallback is used. It is defined here:
-- You are eligible to per-component build if this list is empty
why_not_per_component g
    = cuz_buildtype ++ cuz_spec ++ cuz_length ++ cuz_flag ++ cuz_coverage
In the code right below that, we can see that it can be caused by any of these reasons:
The build-type is Custom or Configure
The cabal-version is less than 1.8
There are no buildable components
You passed the --disable-per-component flag.
Program coverage is enabled
So for the appar library it is because its cabal-version is 1.6, which is lower than 1.8; see https://github.com/kazu-yamamoto/appar/blob/v0.1.8/appar.cabal#L10.
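To make this concrete, here is a hypothetical minimal .cabal file (not taken from appar); either of the two commented fields on its own is enough to push cabal-install onto the per-package path, so the package shows up as (all, legacy fallback) in the build plan:
name:           example
version:        0.1.0.0
-- A cabal-version below 1.8 rules out per-component builds:
cabal-version:  >=1.6
-- So does a Custom (or Configure) build-type:
build-type:     Custom

library
  exposed-modules: Example
  build-depends:   base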
New to Linux and CMake, I'm trying to build a model (SCHISM) with CMake and I get an error about the netCDF libraries...
Here is my netCDF installation's configuration:
This netCDF 4.7.3 has been built with the following features:
--cc -> /usr/bin/cc
--cflags -> -I/usr/include -I/usr/include/hdf5/serial
--libs -> -L/usr/lib/x86_64-linux-gnu -L/usr/lib/x86_64-linux-gnu/hdf5/serial -lnetcdf
--static -> -lhdf5_hl -lhdf5 -lpthread -lsz -lz -ldl -lm -lcurl
--has-fortran -> yes
--fc -> gfortran
--fflags -> -I/usr/include
--flibs -> -L/usr/lib/x86_64-linux-gnu -lnetcdff -Wl,-Bsymbolic-functions -Wl,-z,relro -Wl,-z,now -lnetcdf -lnetcdf -ldl -lm
So I set the paths like this in my TEST script:
set(NetCDF_FORTRAN_DIR "/usr/lib/" CACHE PATH "Path to NetCDF Fortran library")
set(NetCDF_C_DIR "/usr/lib/" CACHE PATH "Path to NetCDF C library")
Then I ran the command: $ cmake -C TEST
And I obtained this error:
### Configuring NetCDF
-- NetCDF include file /usr/include/netcdf.inc will be searched for define values
CMake Error at /home/christelle/Model/schism/cmake/modules/FindNetCDF.cmake:264 (message):
Can not locate NetCDF C library
Call Stack (most recent call first):
CMakeLists.txt:129 (find_package)
CMake Warning at /home/christelle/Model/schism/cmake/modules/FindNetCDF.cmake:268 (message):
********
Can not locate separate NetCDF Fortran library (netcdff) in your NetCDF
Installation.
For older versions of NetCDF the fortran library was not separate
(everything was in the netcdf library) and this is not a problem.
If you experience a lot of linker errors for symbols starting with 'nf_' ,
lack of netcdff is a likely cause.
Call Stack (most recent call first):
CMakeLists.txt:129 (find_package)
CMake Error at /usr/share/cmake-3.16/Modules/FindPackageHandleStandardArgs.cmake:146 (message):
Could NOT find NetCDF (missing: NetCDF_LIBRARIES)
Call Stack (most recent call first):
/usr/share/cmake-3.16/Modules/FindPackageHandleStandardArgs.cmake:393 (_FPHSA_FAILURE_MESSAGE)
/home/christelle/Model/schism/cmake/modules/FindNetCDF.cmake:336 (find_package_handle_standard_args)
CMakeLists.txt:129 (find_package)
-- Configuring incomplete, errors occurred!
See also "/home/christelle/Model/schism/build/CMakeFiles/CMakeOutput.log".
What am I doing wrong?
Could you help me please?
Here is the script for FindNetCDF.cmake:
# -*- mode: cmake -*-
#
# MSTK NetCDF Find Module
# Shamelessly stolen from Amanzi open source code https://software.lanl.gov/ascem/trac
#
# Usage:
# Control the search through NetCDF_DIR or setting environment variable
# NetCDF_ROOT to the NetCDF installation prefix.
#
# This module does not search default paths!
#
# Following variables are set:
# NetCDF_FOUND (BOOL) Flag indicating if NetCDF was found
# NetCDF_INCLUDE_DIR (PATH) Path to the NetCDF include file
# NetCDF_INCLUDE_DIRS (LIST) List of all required include files
# NetCDF_LIBRARY_DIR (PATH) Path to the NetCDF library
# NetCDF_LIBRARY (FILE) NetCDF library
# NetCDF_LIBRARIES (LIST) List of all required NetCDF libraries
#
# Additional variables set
# NetCDF_C_LIBRARY_DIR (PATH) Path to the NetCDF C library
# NetCDF_C_LIBRARY (FILE) NetCDF C library
# NetCDF_CXX_LIBRARY (FILE) NetCDF C++ library
# NetCDF_LARGE_DIMS (BOOL) Checks the header files for size of
# NC_MAX_DIMS, NC_MAX_VARS and NC_MAX_VARS_DIMS
# Returns TRUE if
# NC_MAX_DIMS >= 655363
# NC_MAX_VARS >= 524288
# NC_MAX_VAR_DIMS >= 8
#
# #############################################################################
# Standard CMake modules see CMAKE_ROOT/Modules
include(FindPackageHandleStandardArgs)
# MSTK CMake functions see <root>/cmake/modules for source
#include(PrintVariable)
#include(AddPackageDependency)
if ( NetCDF_LIBRARIES AND NetCDF_INCLUDE_DIRS )
# Do nothing. Variables are set. No need to search again
else(NetCDF_LIBRARIES AND NetCDF_INCLUDE_DIRS)
# Cache variables
if(NetCDF_DIR)
set(NetCDF_DIR "${NetCDF_DIR}" CACHE PATH "Path to search for NetCDF include and library files")
endif()
if(NetCDF_INCLUDE_DIR)
set(NetCDF_INCLUDE_DIR "${NetCDF_INCLUDE_DIR}" CACHE PATH "Path to search for NetCDF include files")
endif()
if(NetCDF_LIBRARY_DIR)
set(NetCDF_LIBRARY_DIR "${NetCDF_LIBRARY_DIR}" CACHE PATH "Path to search for NetCDF library files")
endif()
# Search for include files
# Search order preference:
# (1) NetCDF_INCLUDE_DIR - check existence of path AND if the include files exist
# (2) NetCDF_DIR/<include>
# (3) Default CMake paths See cmake --html-help=out.html file for more information.
#
set(netcdf_inc_names "netcdf.inc")
if (NetCDF_INCLUDE_DIR)
if (EXISTS "${NetCDF_INCLUDE_DIR}")
find_path(cdf_test_include_path
NAMES ${netcdf_inc_names}
HINTS ${NetCDF_INCLUDE_DIR}
NO_DEFAULT_PATH)
if(NOT cdf_test_include_path)
message(SEND_ERROR "Can not locate ${netcdf_inc_names} in ${NetCDF_INCLUDE_DIR}")
endif()
set(NetCDF_INCLUDE_DIR "${cdf_test_include_path}")
else()
message(SEND_ERROR "NetCDF_INCLUDE_DIR=${NetCDF_INCLUDE_DIR} does not exist")
set(NetCDF_INCLUDE_DIR "NetCDF_INCLUDE_DIR-NOTFOUND")
endif()
else()
set(netcdf_inc_suffixes "include")
if(NetCDF_DIR)
if (EXISTS "${NetCDF_DIR}" )
find_path(NetCDF_INCLUDE_DIR
NAMES ${netcdf_inc_names}
HINTS ${NetCDF_DIR}
PATH_SUFFIXES ${netcdf_inc_suffixes}
NO_DEFAULT_PATH)
else()
message(SEND_ERROR "NetCDF_DIR=${NetCDF_DIR} does not exist")
set(NetCDF_INCLUDE_DIR "NetCDF_INCLUDE_DIR-NOTFOUND")
endif()
elseif(NetCDF_FORTRAN_DIR)
if (EXISTS "${NetCDF_FORTRAN_DIR}" )
find_path(NetCDF_INCLUDE_DIR
NAMES ${netcdf_inc_names}
HINTS ${NetCDF_FORTRAN_DIR}
PATH_SUFFIXES ${netcdf_inc_suffixes}
NO_DEFAULT_PATH)
else()
message(SEND_ERROR "NetCDF_FORTRAN_DIR=${NetCDF_FORTRAN_DIR} does not exist")
set(NetCDF_INCLUDE_DIR "NetCDF_INCLUDE_DIR-NOTFOUND")
endif()
else()
find_path(NetCDF_INCLUDE_DIR
NAMES ${netcdf_inc_names}
PATH_SUFFIXES ${netcdf_inc_suffixes})
endif()
endif()
if ( NOT NetCDF_INCLUDE_DIR )
message(SEND_ERROR "Can not locate NetCDF include directory")
endif()
# Large dimension check here
if ( NetCDF_INCLUDE_DIR )
set(netcdf_h "${NetCDF_INCLUDE_DIR}/netcdf.inc" )
message(STATUS "NetCDF include file ${netcdf_h} will be searched for define values")
file(STRINGS "${netcdf_h}" netcdf_max_dims_string REGEX "^#define NC_MAX_DIMS")
string(REGEX REPLACE "[^0-9]" "" netcdf_max_dims "${netcdf_max_dims_string}")
file(STRINGS "${netcdf_h}" netcdf_max_vars_string REGEX "^#define NC_MAX_VARS")
string(REGEX REPLACE "[^0-9]" "" netcdf_max_vars "${netcdf_max_vars_string}")
file(STRINGS "${netcdf_h}" netcdf_max_var_dims_string REGEX "^#define NC_MAX_VAR_DIMS")
string(REGEX REPLACE "[^0-9]" "" netcdf_max_var_dims "${netcdf_max_var_dims_string}")
#if (
# ( (netcdf_max_dims EQUAL 65536) OR (netcdf_max_dims GREATER 65536) ) AND
# ( (netcdf_max_vars EQUAL 524288) OR (netcdf_max_vars GREATER 524288) ) AND
# ( (netcdf_max_var_dims EQUAL 8) OR (netcdf_max_var_dims GREATER 8) )
# )
# set(NetCDF_LARGE_DIMS TRUE)
#else()
# message(WARNING "The NetCDF found in ${NetCDF_DIR} does not have the correct NC_MAX_DIMS, NC_MAX_VARS and NC_MAX_VAR_DIMS\n"
# "It may not be compatible with other TPL libraries such MOAB and ExodusII\n" )
# set(NetCDF_LARGE_DIMS FALSE)
#endif()
endif()
# Search for libraries
# Search order preference:
# (1) NetCDF_LIBRARY_DIR - check existence of path AND if the include files exist
# (2) NetCDF_DIR/<lib,Lib>
# (3) Default CMake paths See cmake --html-help=out.html file for more information.
#
if (NetCDF_LIBRARY_DIR)
if (EXISTS "${NetCDF_LIBRARY_DIR}")
if (NOT EXISTS "${NetCDF_C_LIBRARY_DIR}")
set(NetCDF_C_LIBRARY_DIR "${NetCDF_LIBRARY_DIR}")
endif()
find_library(NetCDF_C_LIBRARY
NAMES netcdf
HINTS ${NetCDF_C_LIBRARY_DIR}
NO_DEFAULT_PATH)
find_library(NetCDF_Fortran_LIBRARY
NAMES netcdff
HINTS ${NetCDF_FORTRAN_LIBRARY_DIR}
NO_DEFAULT_PATH)
else()
message(SEND_ERROR "NetCDF_LIBRARY_DIR=${NetCDF_LIBRARY_DIR} does not exist")
set(NetCDF_LIBRARY "NetCDF_C_LIBRARY-NOTFOUND")
# set(NetCDF_LIBRARY "NetCDF_CXX_LIBRARY-NOTFOUND")
endif()
else()
if(NetCDF_DIR)
if (EXISTS "${NetCDF_DIR}" )
find_library(NetCDF_C_LIBRARY
NAMES netcdf
HINTS ${NetCDF_DIR}
PATH_SUFFIXES "lib" "Lib"
NO_DEFAULT_PATH)
find_library(NetCDF_Fortran_LIBRARY
NAMES netcdff
HINTS ${NetCDF_DIR}
PATH_SUFFIXES "lib" "Lib"
NO_DEFAULT_PATH)
# find_library(NetCDF_CXX_LIBRARY
# NAMES netcdf_c++
# HINTS ${NetCDF_DIR}
# PATH_SUFFIXES "lib" "Lib"
# NO_DEFAULT_PATH)
else()
message(SEND_ERROR "NetCDF_DIR=${NetCDF_DIR} does not exist")
set(NetCDF_LIBRARY "NetCDF_C_LIBRARY-NOTFOUND")
set(NetCDF_LIBRARY "NetCDF_FORTRAN_LIBRARY-NOTFOUND")
endif()
else()
if(NetCDF_C_DIR)
if (EXISTS "${NetCDF_C_DIR}")
find_library(NetCDF_C_LIBRARY
NAMES netcdf
HINTS ${NetCDF_C_DIR}
PATH_SUFFIXES "lib" "Lib"
NO_DEFAULT_PATH)
else()
message(SEND_ERROR "NetCDF_C_DIR=${NetCDF_C_DIR} does not exist")
set(NetCDF_LIBRARY "NetCDF_C_LIBRARY-NOTFOUND")
endif()
endif()
if(NetCDF_FORTRAN_DIR)
if (EXISTS "${NetCDF_FORTRAN_DIR}")
find_library(NetCDF_Fortran_LIBRARY
NAMES netcdff
HINTS ${NetCDF_FORTRAN_DIR}
PATH_SUFFIXES "lib" "Lib"
NO_DEFAULT_PATH)
else()
message(SEND_ERROR "NetCDF_FORTRAN_DIR=${NetCDF_FORTRAN_DIR} does not exist")
set(NetCDF_LIBRARY "NetCDF_FORTRAN_LIBRARY-NOTFOUND")
endif()
endif()
# find_library(NetCDF_C_LIBRARY
# NAMES netcdf
# PATH_SUFFIXES ${netcdf_lib_suffixes})
# find_library(NetCDF_Fortran_LIBRARY
# NAMES netcdff
# PATH_SUFFIXES ${netcdf_lib_suffixes})
# endif()
endif()
endif()
if ( NOT NetCDF_C_LIBRARY )
message(SEND_ERROR "Can not locate NetCDF C library")
endif()
if ( NOT NetCDF_Fortran_LIBRARY )
message(WARNING "\n********\nCan not locate separate NetCDF Fortran library (netcdff) in your NetCDF Installation. \nFor older versions of NetCDF the fortran library was not separate (everything was in the netcdf library) and this is not a problem. \nIf you experience a lot of linker errors for symbols starting with 'nf_' , lack of netcdff is a likely cause.")
endif()
# if ( NOT NetCDF_CXX_LIBRARY )
# message(SEND_ERROR "Can not locate NetCDF CXX library")
# endif()
# Define the LIBRARIES and INCLUDE_DIRS
set(NetCDF_INCLUDE_DIRS ${NetCDF_INCLUDE_DIR})
set(NetCDF_LIBRARIES ${NetCDF_C_LIBRARY} ${NetCDF_Fortran_LIBRARY})
# Need to find the NetCDF config script to check for HDF5
if ( NetCDF_DIR OR NetCDF_BIN_DIR )
find_program(netcdf_config nc-config
HINTS ${NetCDF_DIR} ${NetCDF_BIN_DIR}
PATH_SUFFIXES bin Bin
DOC "NetCDF configuration script")
if (netcdf_config AND (NOT (${CMAKE_SYSTEM_NAME} MATCHES "Windows")))
message(STATUS "Found NetCDF configuration script: ${netcdf_config}")
execute_process(COMMAND "${netcdf_config}" "--has-hdf5"
RESULT_VARIABLE _ret_code
OUTPUT_VARIABLE _stdout
ERROR_VARIABLE _stderr
)
string(REGEX REPLACE "[\n\r ]" "" _hdf5_answer ${_stdout})
message(STATUS "${netcdf_config} --has-hdf5 returned '${_hdf5_answer}'")
string(COMPARE EQUAL "${_hdf5_answer}" "yes" _has_hdf5)
if (${_has_hdf5} )
set(NetCDF_NEEDS_HDF5 True)
else()
message(STATUS "NetCDF does not require HDF5")
endif()
execute_process(COMMAND "${netcdf_config}" "--version"
RESULT_VARIABLE _ret_code
OUTPUT_VARIABLE _stdout
ERROR_VARIABLE _stderr
)
string(REGEX REPLACE "[\n\r ]" "" _netcdf_version ${_stdout})
string(REGEX REPLACE "netCDF" "" _netcdf_version ${_netcdf_version})
message(STATUS "netcdf version: ${_netcdf_version}")
if(${_netcdf_version} VERSION_GREATER "4")
set( NETCDF_4 TRUE)
message(STATUS "support nc4:${NETCDF_4}")
endif()
else()
if (NOT DEFINED NetCDF_NEEDS_HDF5)
message(SEND_ERROR "netcdf_config not available, NetCDF_NEEDS_HDF5 must be set manually")
endif()
endif()
endif()
if(NetCDF_NEEDS_HDF5)
message(STATUS "NetCDF requires HDF5")
#add_package_dependency(NetCDF DEPENDS_ON HDF5)
endif()
endif(NetCDF_LIBRARIES AND NetCDF_INCLUDE_DIRS )
# Send useful message if everything is found
find_package_handle_standard_args(NetCDF DEFAULT_MSG
NetCDF_LIBRARIES
NetCDF_INCLUDE_DIRS)
# find_package_handle_standard_args should set NetCDF_FOUND but it does not!
if ( NetCDF_LIBRARIES AND NetCDF_INCLUDE_DIRS)
set(NetCDF_FOUND TRUE)
else()
set(NetCDF_FOUND FALSE)
endif()
mark_as_advanced(
NetCDF_INCLUDE_DIR
NetCDF_INCLUDE_DIRS
NetCDF_C_LIBRARY
NetCDF_CXX_LIBRARY
NetCDF_LIBRARIES
NetCDF_LIBRARY_DIR
)
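As an aside, the module's own header says the search is meant to be driven by NetCDF_DIR (or the NetCDF_ROOT environment variable), and the code above also checks explicit NetCDF_INCLUDE_DIR / NetCDF_LIBRARY_DIR settings before the per-language *_DIR variables. A hypothetical cache script in that style would look like the sketch below; the paths are taken from the nc-config output above and this is not a verified fix:
# Hypothetical alternatives to the NetCDF_C_DIR / NetCDF_FORTRAN_DIR settings:
# either a single installation prefix, to which the module appends include/ and lib/ itself ...
set(NetCDF_DIR "/usr" CACHE PATH "NetCDF installation prefix")
# ... or explicit include and library directories, which the module searches as-is:
set(NetCDF_INCLUDE_DIR "/usr/include" CACHE PATH "Directory containing netcdf.inc")
set(NetCDF_LIBRARY_DIR "/usr/lib/x86_64-linux-gnu" CACHE PATH "Directory containing libnetcdf")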
Thanks
I am a beginner at Clojure. I am performing the same operation twice, but with the key "lang" changed to "language".
In one case it works well; in the other it throws an error:
java.lang.IllegalArgumentException: No method in multimethod 'my-method' for dispatch value: null
I am not sure whether it is caused by Clojure syntax or by something wrong in my Linux configuration. I have Debian Stretch and Boot (boot-clj).
The error happens in the terminal. Here are both pieces of code and the error:
s#lokal:~$ boot repl
nREPL server started on port 36091 on host 127.0.0.1 - nrepl://127.0.0.1:36091
java.lang.Exception: No namespace: reply.eval-modes.nrepl found
REPL-y 0.4.1, nREPL 0.4.4
Clojure 1.8.0
OpenJDK 64-Bit Server VM 1.8.0_181-8u181-b13-2~deb9u1-b13
Exit: Control+D or (exit) or (quit)
Commands: (user/help)
Docs: (doc function-name-here)
(find-doc "part-of-name-here")
Find by Name: (find-name "part-of-name-here")
Source: (source function-name-here)
Javadoc: (javadoc java-object-or-class-here)
Examples from clojuredocs.org: [clojuredocs or cdoc]
(user/clojuredocs name-here)
(user/clojuredocs "ns-here" "name-here")
boot.user=> (do
#_=> (defmulti my-method (fn[x] (x "lang")))
#_=> (defmethod my-method "English" [params] "Hello!")
#_=> (def english-map {"id" "1", "lang" "English"})
#_=> (my-method english-map)
#_=> )
"Hello!"
boot.user=>
boot.user=> (do
#_=> (defmulti my-method (fn[x] (x "language")))
#_=> (defmethod my-method "English" [params] "Hello!")
#_=> (def english-map {"id" "1", "language" "English"})
#_=> (my-method english-map)
#_=> )
java.lang.IllegalArgumentException: No method in multimethod 'my-method' for dispatch value: null
boot.user=>
I must add that earlier it worked with language but not with lang. It also started or stopped working when I changed the my-method symbol name to something else, such as mymetho-d or greeting.
defmulti defines a var, and subsequent calls to defmulti with the same name do nothing, so your second defmulti call is ineffective and the original dispatch function remains. There are remove-method and remove-all-methods for removing defmethod definitions, but to remove a defmulti definition (without restarting the REPL) you can use alter-var-root to set the var to nil:
(defmulti my-method (fn [x] (x "lang")))
(defmethod my-method "English" [params] "Hello!")
(def english-map {"id" "1", "lang" "English"})
(my-method english-map)
=> "Hello!"
(alter-var-root #'my-method (constantly nil)) ;; set my-method var to nil
(def english-map {"id" "1", "language" "English"})
(defmulti my-method (fn [x] (x "language")))
(defmethod my-method "English" [params] "Hello!")
(my-method english-map)
=> "Hello!"
You can use ns-unmap to similar effect:
(ns-unmap *ns* 'my-method)
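For completeness, a small sketch of the remove-method / remove-all-methods route mentioned above; note that these only drop defmethod entries and do not replace the dispatch function installed by the original defmulti:
;; Drops only the "English" method; my-method itself (and its current
;; dispatch function) stays in place.
(remove-method my-method "English")
;; Drops every installed method at once.
(remove-all-methods my-method)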
I would like to resolve my classpath problem. Could you tell me how to do it?
I can do the following:
I can eval (+ 1 1) in Vim (Slimv) by pressing ,e.
Then Slimv displays the following:
user>
(+ 1 1)
2
However, I cannot do the following.
1. When I eval the following code in Vim (Slimv) by pressing ,e,
(use '[clojure.contrib.str-utils :only (re-split)])
2. Slimv displays the following error:
; Evaluation aborted on java.io.FileNotFoundException: Could not locate clojure/contrib/str_utils__init.class or clojure/contrib/str_utils.clj on classpath:
My environment is the following:
macvim 7.3.754
lein 2.0.0
slimv 0.9.9
ritz 0.7.0
project.clj
(defproject helloworld "1.0.0-SNAPSHOT"
:description "FIXME: write description"
:url "http://example.com/FIXME"
:license {:name "Eclipse Public License"
:url "http://www.eclipse.org/legal/epl-v10.html"}
:dependencies [[org.clojure/clojure "1.4.0"]]
:plugins [[lein-ritz "0.7.0"]]
)
~/.lein/profiles.clj
{:user {:plugins [
[lein-ritz "0.7.0"]
]}}
ref
lein ritz setup error
Have you tried clojure.string?
(use '[clojure.string :only (split)])
(split "clojure8*)-6contrib&(*does^&$not*_^%exist^*#anymore" #"[^a-zA-Z]+")
=> ["clojure" "contrib" "does" "not" "exist" "anymore"]
BTW, all of clojure.contrib has been migrated to separate libraries: http://dev.clojure.org/display/design/Where+Did+Clojure.Contrib+Go
I'm able to open a zip entry and tweak the contents, but I can't figure out how to save them.
The API for CL ZIP is wanting.
Specs:
ZIP
Quicklisp
MacPorts CLISP 2.49
Aquamacs 2.1 (Emacs 23.2)
Mac OS X 10.6.4
MacBook Pro 5,1
; Removes newlines at the beginning of PRE tags in Sigil-edited ePub files.
;
; See http://code.google.com/p/sigil/issues/detail?id=655
;
; Andrew Pennebaker
; 16 Nov 2010
; Requires Quicklisp.
; http://www.quicklisp.org/
(ql:quickload "zip")
(ql:quickload "cl-ppcre")
(defvar *epub* nil)
(defvar *epub-contents* nil)
(defvar *epub-out* nil)
(defun load-epub (filename)
(setq *epub* (zip:open-zipfile filename)))
(defun close-epub ()
(zip:close-zipfile *epub*)
(setq *epub* nil)
(setq *epub-contents* nil))
(defun gather-epub-contents ()
(zip:do-zipfile-entries (name entry *epub*)
(push name *epub-contents*)))
(defun is-html-file (name)
(if (cl-ppcre:scan ".+\\.htm[l]?$" name) t nil))
(defun entry-name-to-html (name)
(flexi-streams:octets-to-string
(zip:zipfile-entry-contents
(zip:get-zipfile-entry name *epub*))))
(defun clean (html)
(values
(cl-ppcre:regex-replace-all
"<pre[^>]*>(\\s)*"
(cl-ppcre:regex-replace-all "\\s+</pre>" html "</pre>")
"<pre>")))
As per the documentation of the Common Lisp ZIP library, you have to obtain a separate handle to write to the zip file. Probably you can extract the contents to a folder, tweak the contents and compress the whole folder with a single call to (zip path-name source-folder).
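A rough sketch of that approach (untested; the scratch-directory path and the function name are made up, and it assumes zip:unzip / zip:zip take a target directory plus an :if-exists keyword as described in the library's documentation):
;; Unzip the ePub into a scratch directory, rewrite every HTML entry with
;; CLEAN from the code above, then rebuild the archive in one call.
(defun clean-epub-via-directory (epub-path)
  (let ((workdir #p"/tmp/epub-work/"))            ; hypothetical scratch directory
    (zip:unzip epub-path workdir :if-exists :supersede)
    ;; "**/" is a CLISP-style recursive wildcard; adjust for other Lisps.
    (dolist (file (directory (merge-pathnames "**/*.*" workdir)))
      (when (is-html-file (namestring file))
        (let ((html (with-open-file (in file :direction :input)
                      (let* ((buffer (make-string (file-length in)))
                             (end (read-sequence buffer in)))
                        (subseq buffer 0 end)))))
          (with-open-file (out file :direction :output :if-exists :supersede)
            (write-string (clean html) out)))))
    ;; Re-create the archive from the tweaked directory tree.
    (zip:zip epub-path workdir :if-exists :supersede)))
One caveat: ePub readers expect the mimetype entry to be the first file in the archive and stored uncompressed, which a naive re-zip of the directory may not preserve.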
Using a temp file, you're able to unzip, modify, and rezip.
See sigil-clean.cl