diff --git a/README.md b/README.md index 8664953c0cc697b2d0f5371cd9f073d1a757dc28..1977a4fee9641954eed2cb30d57b9ea33cbf53dd 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,8 @@ can join it here: At the moment, contributing to Spack is relatively simple. Just send us a [pull request](https://help.github.com/articles/using-pull-requests/). -When you send your request, make ``develop`` the destination branch. +When you send your request, make ``develop`` the destination branch on the +[Spack repository](https://github.com/LLNL/spack). Spack is using a rough approximation of the [Git Flow](http://nvie.com/posts/a-successful-git-branching-model/) diff --git a/etc/spack/modules.yaml b/etc/spack/modules.yaml new file mode 100644 index 0000000000000000000000000000000000000000..aa2a2c3fe2990d976c1c3ca6c682a149b4b6a4bf --- /dev/null +++ b/etc/spack/modules.yaml @@ -0,0 +1,8 @@ +# ------------------------------------------------------------------------- +# This is the default configuration for Spack module file generation. +# +# Changes to this file will affect all users of this spack install, +# although users can override these settings in their ~/.spack/modules.yaml. +# ------------------------------------------------------------------------- +modules: + enable: ['tcl', 'dotkit'] diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index accf09cc2a437e1e94f7565fc0aecdccf887b4ae..68f3d07b297c632669b1604d3ac0e1d3429f8599 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -149,26 +149,46 @@ customize an installation in :ref:`sec-specs`. ``spack uninstall`` ~~~~~~~~~~~~~~~~~~~~~ -To uninstall a package, type ``spack uninstall <package>``. This will -completely remove the directory in which the package was installed. +To uninstall a package, type ``spack uninstall <package>``. This will ask the user for +confirmation and, once confirmed, will completely remove the directory in which the package was installed. .. code-block:: sh spack uninstall mpich If there are still installed packages that depend on the package to be -uninstalled, spack will refuse to uninstall it. You can override this -behavior with ``spack uninstall -f <package>``, but you risk breaking -other installed packages. In general, it is safer to remove dependent -packages *before* removing their dependencies. +uninstalled, spack will refuse to uninstall it. -A line like ``spack uninstall mpich`` may be ambiguous, if multiple -``mpich`` configurations are installed. For example, if both +To uninstall a package and every package that depends on it, you may give the +``--dependents`` option. + +.. code-block:: sh + + spack uninstall --dependents mpich + +will display a list of all the packages that depend on ``mpich`` and, upon confirmation, +will uninstall them in the right order. + +A line like + +.. code-block:: sh + + spack uninstall mpich + +may be ambiguous if multiple ``mpich`` configurations are installed. For example, if both ``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer to either one. Because it cannot determine which one to uninstall, -Spack will ask you to provide a version number to remove the -ambiguity. As an example, ``spack uninstall mpich@3.1`` is -unambiguous in this scenario. +Spack will ask you either to provide a version number to remove the +ambiguity or to use the ``--all`` option to uninstall all of the matching packages. + +You may force the uninstallation of a package with the ``--force`` option + +..
code-block:: sh + + spack uninstall --force mpich + +but you risk breaking other installed packages. In general, it is safer to remove dependent +packages *before* removing their dependencies, or to use the ``--dependents`` option. Seeing installed packages @@ -774,6 +794,34 @@ Environment modules Spack provides some limited integration with environment module systems to make it easier to use the packages it provides. + +Installing Environment Modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To use Spack's generated environment modules, you must have +installed the *Environment Modules* package. On many Linux +distributions, this can be installed from the vendor's repository. +For example: ``yum install environment-modules`` +(Fedora/RHEL/CentOS). If your Linux distribution does not have +Environment Modules, you can get it with Spack: + +1. Install with:: + + spack install environment-modules + +2. Activate with:: + + MODULES_HOME=`spack location -i environment-modules` + MODULES_VERSION=`ls -1 $MODULES_HOME/Modules | head -1` + ${MODULES_HOME}/Modules/${MODULES_VERSION}/bin/add.modules + +This adds commands to your ``.bashrc`` (or similar) file, enabling Environment +Modules when you log in. It will ask your permission before changing +any files. + +Spack and Environment Modules +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + You can enable shell support by sourcing some files in the ``/share/spack`` directory. diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 4a3e6eddc9438bcd3da73ff0202656a9409b658b..18fd8f7bdb1f28f071f7c52255a3cda624d35c6e 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -39,7 +39,7 @@ # # This is the list of environment variables that need to be set before -# the script runs. They are set by routines in spack.build_environment +# the script runs. They are set by routines in spack.build_environment # as part of spack.package.Package.do_install(). parameters=" SPACK_PREFIX @@ -50,7 +50,7 @@ SPACK_SHORT_SPEC" # The compiler input variables are checked for sanity later: # SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC -# Debug flag is optional; set to true for debug logging: +# Debug flag is optional; set to "TRUE" for debug logging: # SPACK_DEBUG # Test command is used to unit test the compiler script. # SPACK_TEST_COMMAND @@ -65,12 +65,11 @@ function die { } for param in $parameters; do - if [ -z "${!param}" ]; then - die "Spack compiler must be run from spack! Input $param was missing!" + if [[ -z ${!param} ]]; then + die "Spack compiler must be run from Spack! Input '$param' is missing." fi done -# # Figure out the type of compiler, the language, and the mode so that # the compiler script knows what to do. # @@ -78,14 +77,18 @@ done # 'command' is set based on the input command to $SPACK_[CC|CXX|F77|F90] # # 'mode' is set to one of: +# vcheck version check +# cpp preprocess # cc compile +# as assemble # ld link # ccld compile & link -# cpp preprocessor -# vcheck version check -# + command=$(basename "$0") case "$command" in + cpp) + mode=cpp + ;; cc|c89|c99|gcc|clang|icc|pgcc|xlc) command="$SPACK_CC" language="C" @@ -102,9 +105,6 @@ case "$command" in command="$SPACK_F77" language="Fortran 77" ;; - cpp) - mode=cpp - ;; ld) mode=ld ;; @@ -113,10 +113,12 @@ case "$command" in ;; esac -# If any of the arguments below is present then the mode is vcheck. In vcheck mode nothing is added in terms of extra search paths or libraries -if [ -z "$mode" ]; then +# If any of the arguments below are present, then the mode is vcheck.
+# In vcheck mode, nothing is added in terms of extra search paths or +# libraries. +if [[ -z $mode ]]; then for arg in "$@"; do - if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then + if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then mode=vcheck break fi @@ -124,14 +126,16 @@ if [ -z "$mode" ]; then fi # Finish setting up the mode. - -if [ -z "$mode" ]; then +if [[ -z $mode ]]; then mode=ccld for arg in "$@"; do - if [ "$arg" = -E ]; then + if [[ $arg == -E ]]; then mode=cpp break - elif [ "$arg" = -c ]; then + elif [[ $arg == -S ]]; then + mode=as + break + elif [[ $arg == -c ]]; then mode=cc break fi @@ -139,175 +143,76 @@ if [ -z "$mode" ]; then fi # Dump the version and exit if we're in testing mode. -if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then +if [[ $SPACK_TEST_COMMAND == dump-mode ]]; then echo "$mode" exit fi # Check that at least one of the real commands was actually selected, # otherwise we don't know what to execute. -if [ -z "$command" ]; then +if [[ -z $command ]]; then die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs." fi -# Save original command for debug logging -input_command="$@" - -if [ "$mode" == vcheck ] ; then +if [[ $mode == vcheck ]]; then exec ${command} "$@" fi -# -# Now do real parsing of the command line args, trying hard to keep -# non-rpath linker arguments in the proper order w.r.t. other command -# line arguments. This is important for things like groups. -# -includes=() -libraries=() -libs=() -rpaths=() -other_args=() - -while [ -n "$1" ]; do - case "$1" in - -I*) - arg="${1#-I}" - if [ -z "$arg" ]; then shift; arg="$1"; fi - includes+=("$arg") - ;; - -L*) - arg="${1#-L}" - if [ -z "$arg" ]; then shift; arg="$1"; fi - libraries+=("$arg") - ;; - -l*) - arg="${1#-l}" - if [ -z "$arg" ]; then shift; arg="$1"; fi - libs+=("$arg") - ;; - -Wl,*) - arg="${1#-Wl,}" - # TODO: Handle multiple -Wl, continuations of -Wl,-rpath - if [[ $arg == -rpath=* ]]; then - arg="${arg#-rpath=}" - for rpath in ${arg//,/ }; do - rpaths+=("$rpath") - done - elif [[ $arg == -rpath,* ]]; then - arg="${arg#-rpath,}" - for rpath in ${arg//,/ }; do - rpaths+=("$rpath") - done - elif [[ $arg == -rpath ]]; then - shift; arg="$1" - if [[ $arg != '-Wl,'* ]]; then - die "-Wl,-rpath was not followed by -Wl,*" - fi - arg="${arg#-Wl,}" - for rpath in ${arg//,/ }; do - rpaths+=("$rpath") - done - else - other_args+=("-Wl,$arg") - fi - ;; - -Xlinker) - shift; arg="$1"; - if [[ $arg = -rpath=* ]]; then - rpaths+=("${arg#-rpath=}") - elif [[ $arg = -rpath ]]; then - shift; arg="$1" - if [[ $arg != -Xlinker ]]; then - die "-Xlinker -rpath was not followed by -Xlinker <arg>" - fi - shift; arg="$1" - rpaths+=("$arg") - else - other_args+=("-Xlinker") - other_args+=("$arg") - fi - ;; - *) - other_args+=("$1") - ;; - esac - shift -done - -# Dump parsed values for unit testing if asked for -if [ -n "$SPACK_TEST_COMMAND" ]; then - IFS=$'\n' - case "$SPACK_TEST_COMMAND" in - dump-includes) echo "${includes[*]}";; - dump-libraries) echo "${libraries[*]}";; - dump-libs) echo "${libs[*]}";; - dump-rpaths) echo "${rpaths[*]}";; - dump-other-args) echo "${other_args[*]}";; - dump-all) - echo "INCLUDES:" - echo "${includes[*]}" - echo - echo "LIBRARIES:" - echo "${libraries[*]}" - echo - echo "LIBS:" - echo "${libs[*]}" - echo - echo "RPATHS:" - echo "${rpaths[*]}" - echo - echo "ARGS:" - echo "${other_args[*]}" - ;; - *) - echo "ERROR: Unknown test command" - exit 1 ;; - esac - exit +# Darwin's 
linker has a -r argument that merges object files together. +# It doesn't work with -rpath. +# This variable controls whether RPATHs are added. +add_rpaths=true +if [[ $mode == ld && $OSTYPE == darwin* ]]; then + for arg in "$@"; do + if [[ $arg == -r ]]; then + add_rpaths=false + break + fi + done fi +# Save original command for debug logging +input_command="$@" +args=("$@") + # Read spack dependencies from the path environment variable IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES" for dep in "${deps[@]}"; do - if [ -d "$dep/include" ]; then - includes+=("$dep/include") + # Prepend include directories + if [[ -d $dep/include ]]; then + if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then + args=("-I$dep/include" "${args[@]}") + fi fi - if [ -d "$dep/lib" ]; then - libraries+=("$dep/lib") - rpaths+=("$dep/lib") + # Prepend lib and RPATH directories + if [[ -d $dep/lib ]]; then + if [[ $mode == ccld ]]; then + $add_rpaths && args=("-Wl,-rpath,$dep/lib" "${args[@]}") + args=("-L$dep/lib" "${args[@]}") + elif [[ $mode == ld ]]; then + $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}") + args=("-L$dep/lib" "${args[@]}") + fi fi - if [ -d "$dep/lib64" ]; then - libraries+=("$dep/lib64") - rpaths+=("$dep/lib64") + # Prepend lib64 and RPATH directories + if [[ -d $dep/lib64 ]]; then + if [[ $mode == ccld ]]; then + $add_rpaths && args=("-Wl,-rpath,$dep/lib64" "${args[@]}") + args=("-L$dep/lib64" "${args[@]}") + elif [[ $mode == ld ]]; then + $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}") + args=("-L$dep/lib64" "${args[@]}") + fi fi done # Include all -L's and prefix/whatever dirs in rpath -for dir in "${libraries[@]}"; do - [[ dir = $SPACK_INSTALL* ]] && rpaths+=("$dir") -done -rpaths+=("$SPACK_PREFIX/lib") -rpaths+=("$SPACK_PREFIX/lib64") - -# Put the arguments together -args=() -for dir in "${includes[@]}"; do args+=("-I$dir"); done -args+=("${other_args[@]}") -for dir in "${libraries[@]}"; do args+=("-L$dir"); done -for lib in "${libs[@]}"; do args+=("-l$lib"); done - -if [ "$mode" = ccld ]; then - for dir in "${rpaths[@]}"; do - args+=("-Wl,-rpath") - args+=("-Wl,$dir"); - done -elif [ "$mode" = ld ]; then - for dir in "${rpaths[@]}"; do - args+=("-rpath") - args+=("$dir"); - done +if [[ $mode == ccld ]]; then + $add_rpaths && args=("-Wl,-rpath,$SPACK_PREFIX/lib" "-Wl,-rpath,$SPACK_PREFIX/lib64" "${args[@]}") +elif [[ $mode == ld ]]; then + $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "-rpath" "$SPACK_PREFIX/lib64" "${args[@]}") fi # @@ -323,34 +228,40 @@ unset DYLD_LIBRARY_PATH # IFS=':' read -ra env_path <<< "$PATH" IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH" -spack_env_dirs+=(".") +spack_env_dirs+=("" ".") PATH="" for dir in "${env_path[@]}"; do - remove="" - for rm_dir in "${spack_env_dirs[@]}"; do - if [ "$dir" = "$rm_dir" ]; then remove=True; fi - done - if [ -z "$remove" ]; then - if [ -z "$PATH" ]; then - PATH="$dir" - else - PATH="$PATH:$dir" + addpath=true + for env_dir in "${spack_env_dirs[@]}"; do + if [[ $dir == $env_dir ]]; then + addpath=false + break fi + done + if $addpath; then + PATH="${PATH:+$PATH:}$dir" fi done export PATH -full_command=("$command") -full_command+=("${args[@]}") +full_command=("$command" "${args[@]}") + +# In test command mode, write out full command for Spack tests. +if [[ $SPACK_TEST_COMMAND == dump-args ]]; then + echo "${full_command[@]}" + exit +elif [[ -n $SPACK_TEST_COMMAND ]]; then + die "ERROR: Unknown test command" +fi # # Write the input and output commands to debug logs if it's asked for.
# -if [ "$SPACK_DEBUG" = "TRUE" ]; then +if [[ $SPACK_DEBUG == TRUE ]]; then input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log" output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log" - echo "$input_command" >> $input_log - echo "$mode ${full_command[@]}" >> $output_log + echo "[$mode] $command $input_command" >> $input_log + echo "[$mode] ${full_command[@]}" >> $output_log fi exec "${full_command[@]}" diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index c4665c284cf4ec04f8f18bd45bd8877ffd8ac431..70d46a7f77b62904e8bc6df06da6bf4fa74a8d6a 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -27,9 +27,11 @@ 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', 'set_executable', 'copy_mode', 'unset_executable_mode', - 'remove_dead_links', 'remove_linked_tree'] + 'remove_dead_links', 'remove_linked_tree', 'find_library_path', + 'fix_darwin_install_name'] import os +import glob import sys import re import shutil @@ -38,6 +40,7 @@ import getpass from contextlib import contextmanager, closing from tempfile import NamedTemporaryFile +import subprocess import llnl.util.tty as tty from spack.util.compression import ALLOWED_ARCHIVE_TYPES @@ -392,3 +395,44 @@ def remove_linked_tree(path): os.unlink(path) else: shutil.rmtree(path, True) + + +def fix_darwin_install_name(path): + """ + Fix install name of dynamic libraries on Darwin to have full path. + There are two parts of this task: + (i) use install_name('-id',...) to change install name of a single lib; + (ii) use install_name('-change',...) to change the cross linking between libs. + The function assumes that all libraries are in one folder and currently won't + follow subfolders. + + Args: + path: directory in which .dylib files are alocated + + """ + libs = glob.glob(join_path(path,"*.dylib")) + for lib in libs: + # fix install name first: + subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0] + long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n') + deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]] + # fix all dependencies: + for dep in deps: + for loc in libs: + if dep == os.path.basename(loc): + subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0] + break + + +def find_library_path(libname, *paths): + """Searches for a file called <libname> in each path. + + Return: + directory where the library was found, if found. None otherwise. + + """ + for path in paths: + library = join_path(path, libname) + if os.path.exists(library): + return path + return None diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 13d301f84e6630f2f0956b3ac5b50b408cad9a5c..3b4e2c8352a24e9f5fd4db32c8b134c2eab53906 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -117,7 +117,8 @@ def caller_locals(): scope. Yes, this is some black magic, and yes it's useful for implementing things like depends_on and provides. """ - stack = inspect.stack() + # Passing zero here skips line context for speed. + stack = inspect.stack(0) try: return stack[2][0].f_locals finally: @@ -128,7 +129,8 @@ def get_calling_module_name(): """Make sure that the caller is a class definition, and return the enclosing module's name. """ - stack = inspect.stack() + # Passing zero here skips line context for speed. 
+ stack = inspect.stack(0) try: # Make sure locals contain __module__ caller_locals = stack[2][0].f_locals diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index aee11f061f326b263099a4f13797c916a56c105b..9108e1d0e333d13af55555a4a72416718bbff668 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -136,9 +136,7 @@ # don't add a second username if it's already unique by user. if not _tmp_user in path: tmp_dirs.append(join_path(path, '%u', 'spack-stage')) - -for path in _tmp_candidates: - if not path in tmp_dirs: + else: tmp_dirs.append(join_path(path, 'spack-stage')) # Whether spack should allow installation of unsafe versions of diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 119a255a349896820f17627d7ac9cffad692286a..eb72f2a6b4a471397bc16341ea7f0d304e4bbd08 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -59,6 +59,11 @@ SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR' +# Platform-specific library suffix. +dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so' + + + class MakeExecutable(Executable): """Special callable executable object for make so the user can specify parallel or not on a per-invocation basis. Using @@ -208,7 +213,7 @@ def set_module_variables_for_package(pkg, module): # TODO: of build dependencies, as opposed to link dependencies. # TODO: Currently, everything is a link dependency, but tools like # TODO: this shouldn't be. - m.cmake = which("cmake") + m.cmake = Executable('cmake') # standard CMake arguments m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix, @@ -246,6 +251,9 @@ def set_module_variables_for_package(pkg, module): # a Prefix object. m.prefix = pkg.prefix + # Platform-specific library suffix. + m.dso_suffix = dso_suffix + def get_rpaths(pkg): """Get a list of all the rpaths for a package.""" @@ -270,21 +278,6 @@ def parent_class_modules(cls): return result -def setup_module_variables_for_dag(pkg): - """Set module-scope variables for all packages in the DAG.""" - for spec in pkg.spec.traverse(order='post'): - # If a user makes their own package repo, e.g. - # spack.repos.mystuff.libelf.Libelf, and they inherit from - # an existing class like spack.repos.original.libelf.Libelf, - # then set the module variables for both classes so the - # parent class can still use them if it gets called. - spkg = spec.package - modules = parent_class_modules(spkg.__class__) - for mod in modules: - set_module_variables_for_package(spkg, mod) - set_module_variables_for_package(spkg, spkg.module) - - def setup_package(pkg): """Execute all environment setup routines.""" spack_env = EnvironmentModifications() @@ -308,20 +301,27 @@ def setup_package(pkg): set_compiler_environment_variables(pkg, spack_env) set_build_environment_variables(pkg, spack_env) - setup_module_variables_for_dag(pkg) - # Allow dependencies to modify the module + # traverse in postorder so package can use vars from its dependencies spec = pkg.spec - for dependency_spec in spec.traverse(root=False): - dpkg = dependency_spec.package - dpkg.setup_dependent_package(pkg.module, spec) + for dspec in pkg.spec.traverse(order='post', root=False): + # If a user makes their own package repo, e.g. + # spack.repos.mystuff.libelf.Libelf, and they inherit from + # an existing class like spack.repos.original.libelf.Libelf, + # then set the module variables for both classes so the + # parent class can still use them if it gets called. 
+ spkg = dspec.package + modules = parent_class_modules(spkg.__class__) + for mod in modules: + set_module_variables_for_package(spkg, mod) + set_module_variables_for_package(spkg, spkg.module) - # Allow dependencies to set up environment as well - for dependency_spec in spec.traverse(root=False): - dpkg = dependency_spec.package + # Allow dependencies to modify the module + dpkg = dspec.package + dpkg.setup_dependent_package(pkg.module, spec) dpkg.setup_dependent_environment(spack_env, run_env, spec) - # Allow the package to apply some settings. + set_module_variables_for_package(pkg, pkg.module) pkg.setup_environment(spack_env, run_env) # Make sure nothing's strange about the Spack environment. diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index e7abe7f4a5921d6219d8aa9b9d6ca7e2d987127c..c93db55c63c68b785701ace7a72630ae0ec0093e 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -52,7 +52,7 @@ def print_text_info(pkg): print "Safe versions: " if not pkg.versions: - print("None") + print(" None") else: pad = padder(pkg.versions, 4) for v in reversed(sorted(pkg.versions)): @@ -62,7 +62,7 @@ def print_text_info(pkg): print print "Variants:" if not pkg.variants: - print "None" + print " None" else: pad = padder(pkg.variants, 4) diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index 315d9fc9266a5bda6f58e3b3056a3c21aafd8e22..a67f5c0c137b86a8d07c5d1478e165ed93df9c1e 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -22,21 +22,16 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -import sys import os import shutil -import argparse +import sys import llnl.util.tty as tty -from llnl.util.lang import partition_list -from llnl.util.filesystem import mkdirp - import spack.cmd +from llnl.util.filesystem import mkdirp from spack.modules import module_types from spack.util.string import * -from spack.spec import Spec - description ="Manipulate modules and dotkits." 
@@ -98,7 +93,6 @@ def module_refresh(): cls(spec).write() - def module(parser, args): if args.module_command == 'refresh': module_refresh() diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index cf478d3763e1b7c9efc8f378dc784467b143203b..20a3fc5fc229427e81075ab4d3009bb1aa68f578 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -77,7 +77,8 @@ def get_git(): def list_packages(rev): git = get_git() - relpath = spack.packages_path[len(spack.prefix + os.path.sep):] + os.path.sep + pkgpath = os.path.join(spack.packages_path, 'packages') + relpath = pkgpath[len(spack.prefix + os.path.sep):] + os.path.sep output = git('ls-tree', '--full-tree', '--name-only', rev, relpath, output=str) return sorted(line[len(relpath):] for line in output.split('\n') if line) diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index 5786780efb43e8b9e7f1dabc3550bb8121e1b904..975bb54ef7540e6a85c7d9babd9d6781fd7bfc9c 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -35,6 +35,9 @@ def setup_parser(subparser): subparser.add_argument( '-n', '--no-checksum', action='store_true', dest='no_checksum', help="Do not check downloaded packages against checksum") + subparser.add_argument( + '-p', '--path', dest='path', + help="Path to stage package, does not add to spack tree") subparser.add_argument( 'specs', nargs=argparse.REMAINDER, help="specs of packages to stage") @@ -50,4 +53,6 @@ def stage(parser, args): specs = spack.cmd.parse_specs(args.specs, concretize=True) for spec in specs: package = spack.repo.get(spec) + if args.path: + package.path = args.path package.do_stage() diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 350ef372cb6e92778e30090a064f04a50ea6120e..1ff3d8db5f185a44cd745913be1fa0fe1673c1f1 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -23,19 +23,33 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from __future__ import print_function -import sys + import argparse import llnl.util.tty as tty -from llnl.util.tty.colify import colify - import spack import spack.cmd import spack.repository from spack.cmd.find import display_specs -from spack.package import PackageStillNeededError -description="Remove an installed package" +description = "Remove an installed package" + +error_message = """You can either: + a) Use a more specific spec, or + b) use spack uninstall -a to uninstall ALL matching specs. +""" + + +def ask_for_confirmation(message): + while True: + tty.msg(message + '[y/n]') + choice = raw_input().lower() + if choice == 'y': + break + elif choice == 'n': + raise SystemExit('Operation aborted') + tty.warn('Please reply either "y" or "n"') + def setup_parser(subparser): subparser.add_argument( @@ -44,10 +58,101 @@ def setup_parser(subparser): subparser.add_argument( '-a', '--all', action='store_true', dest='all', help="USE CAREFULLY. Remove ALL installed packages that match each " + - "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + - "libelf are uninstalled. This is both useful and dangerous, like rm -r.") + "supplied spec. i.e., if you say uninstall libelf, ALL versions of " + + "libelf are uninstalled. 
This is both useful and dangerous, like rm -r.") subparser.add_argument( - 'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall") + '-d', '--dependents', action='store_true', dest='dependents', + help='Also uninstall any packages that depend on the ones given via command line.' + ) + subparser.add_argument( + '-y', '--yes-to-all', action='store_true', dest='yes_to_all', + help='Assume "yes" is the answer to every confirmation asked to the user.' + + ) + subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall") + + +def concretize_specs(specs, allow_multiple_matches=False, force=False): + """ + Returns a list of specs matching the non necessarily concretized specs given from cli + + Args: + specs: list of specs to be matched against installed packages + allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted) + + Return: + list of specs + """ + specs_from_cli = [] # List of specs that match expressions given via command line + has_errors = False + for spec in specs: + matching = spack.installed_db.query(spec) + # For each spec provided, make sure it refers to only one package. + # Fail and ask user to be unambiguous if it doesn't + if not allow_multiple_matches and len(matching) > 1: + tty.error("%s matches multiple packages:" % spec) + print() + display_specs(matching, long=True) + print() + has_errors = True + + # No installed package matches the query + if len(matching) == 0 and not force: + tty.error("%s does not match any installed packages." % spec) + has_errors = True + + specs_from_cli.extend(matching) + if has_errors: + tty.die(error_message) + + return specs_from_cli + + +def installed_dependents(specs): + """ + Returns a dictionary that maps a spec with a list of its installed dependents + + Args: + specs: list of specs to be checked for dependents + + Returns: + dictionary of installed dependents + """ + dependents = {} + for item in specs: + lst = [x for x in item.package.installed_dependents if x not in specs] + if lst: + lst = list(set(lst)) + dependents[item] = lst + return dependents + + +def do_uninstall(specs, force): + """ + Uninstalls all the specs in a list. + + Args: + specs: list of specs to be uninstalled + force: force uninstallation (boolean) + """ + packages = [] + for item in specs: + try: + # should work if package is known to spack + packages.append(item.package) + except spack.repository.UnknownPackageError as e: + # The package.py file has gone away -- but still + # want to uninstall. + spack.Package(item).do_uninstall(force=True) + + # Sort packages to be uninstalled by the number of installed dependents + # This ensures we do things in the right order + def num_installed_deps(pkg): + return len(pkg.installed_dependents) + + packages.sort(key=num_installed_deps) + for item in packages: + item.do_uninstall(force=force) def uninstall(parser, args): @@ -56,50 +161,34 @@ def uninstall(parser, args): with spack.installed_db.write_transaction(): specs = spack.cmd.parse_specs(args.packages) + # Gets the list of installed specs that match the ones give via cli + uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli + dependent_list = installed_dependents(uninstall_list) # takes care of '-d' - # For each spec provided, make sure it refers to only one package. 
- # Fail and ask user to be unambiguous if it doesn't - pkgs = [] - for spec in specs: - matching_specs = spack.installed_db.query(spec) - if not args.all and len(matching_specs) > 1: - tty.error("%s matches multiple packages:" % spec) - print() - display_specs(matching_specs, long=True) - print() - print("You can either:") - print(" a) Use a more specific spec, or") - print(" b) use spack uninstall -a to uninstall ALL matching specs.") - sys.exit(1) - - if len(matching_specs) == 0: - if args.force: continue - tty.die("%s does not match any installed packages." % spec) - - for s in matching_specs: - try: - # should work if package is known to spack - pkgs.append(s.package) - except spack.repository.UnknownPackageError as e: - # The package.py file has gone away -- but still - # want to uninstall. - spack.Package(s).do_uninstall(force=True) - - # Sort packages to be uninstalled by the number of installed dependents - # This ensures we do things in the right order - def num_installed_deps(pkg): - return len(pkg.installed_dependents) - pkgs.sort(key=num_installed_deps) - - # Uninstall packages in order now. - for pkg in pkgs: - try: - pkg.do_uninstall(force=args.force) - except PackageStillNeededError as e: - tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True)) + # Process dependent_list and update uninstall_list + has_error = False + if dependent_list and not args.dependents and not args.force: + for spec, lst in dependent_list.items(): + tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True)) print('') print("The following packages depend on it:") - display_specs(e.dependents, long=True) + display_specs(lst, long=True) print('') - print("You can use spack uninstall -f to force this action.") - sys.exit(1) + has_error = True + elif args.dependents: + for key, lst in dependent_list.items(): + uninstall_list.extend(lst) + uninstall_list = list(set(uninstall_list)) + + if has_error: + tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') + + if not args.yes_to_all: + tty.msg("The following packages will be uninstalled : ") + print('') + display_specs(uninstall_list, long=True) + print('') + ask_for_confirmation('Do you want to proceed ? 
') + + # Uninstall everything on the list + do_uninstall(uninstall_list, args.force) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 2e576743ec6985be293cc03177876e49bb6ed1cb..ed9bf79868269571a9f74c6a06b36d717d1dbd6a 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -159,6 +159,10 @@ def concretize_version(self, spec): if any(v.satisfies(sv) for sv in spec.versions)], cmp=cmp_versions) + def prefer_key(v): + return pkg.versions.get(Version(v)).get('preferred', False) + valid_versions.sort(key=prefer_key, reverse=True) + if valid_versions: spec.versions = ver([valid_versions[0]]) else: diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 6afd69b3ac7f8db3fde0f52c68b0e0834b624626..14e5aaf4fb624e9532e56b2f2a2f60669b8b3152 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -237,7 +237,29 @@ 'type' : 'object', 'default' : {}, } - },},},},},} + },},},},},}, + 'modules': { + '$schema': 'http://json-schema.org/schema#', + 'title': 'Spack module file configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'modules:?': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'properties': { + 'enable': { + 'type': 'array', + 'default': [], + 'items': { + 'type': 'string' + } + } + } + }, + }, + }, } """OrderedDict of config scopes keyed by name. @@ -405,11 +427,11 @@ def _read_config_file(filename, schema): validate_section(data, schema) return data - except MarkedYAMLError, e: + except MarkedYAMLError as e: raise ConfigFileError( "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) - except IOError, e: + except IOError as e: raise ConfigFileError( "Error reading configuration file %s: %s" % (filename, str(e))) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 0d0a7db8a9146d952d3d7f7173a970601881af1b..4ea87bea7ea3e71b61b2766b464e1a1acf2b24e3 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -289,8 +289,14 @@ def reset(self): if not self.archive_file: raise NoArchiveFileError("Tried to reset URLFetchStrategy before fetching", "Failed on reset() for URL %s" % self.url) - if self.stage.source_path: - shutil.rmtree(self.stage.source_path, ignore_errors=True) + + # Remove everythigng but the archive from the stage + for filename in os.listdir(self.stage.path): + abspath = os.path.join(self.stage.path, filename) + if abspath != self.archive_file: + shutil.rmtree(abspath, ignore_errors=True) + + # Expand the archive again self.expand() def __repr__(self): diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 05c93cd3e655d9c3bb6dfba8c3e722dede9af1ca..61624fbd703e8debe12d6ad294ae961501783eee 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -48,6 +48,7 @@ import llnl.util.tty as tty import spack +import spack.config from llnl.util.filesystem import join_path, mkdirp from spack.environment import * @@ -56,6 +57,8 @@ # Registry of all types of modules. 
Entries created by EnvModule's metaclass module_types = {} +CONFIGURATION = spack.config.get_config('modules') + def print_help(): """For use by commands to tell user how to activate shell support.""" @@ -115,7 +118,7 @@ class EnvModule(object): class __metaclass__(type): def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) - if cls.name != 'env_module': + if cls.name != 'env_module' and cls.name in CONFIGURATION['enable']: module_types[cls.name] = cls def __init__(self, spec=None): @@ -158,13 +161,18 @@ def write(self): # Let the extendee modify their extensions before asking for # package-specific modifications - for extendee in self.pkg.extendees: - extendee_spec = self.spec[extendee] - extendee_spec.package.modify_module( - self.pkg.module, extendee_spec, self.spec) + spack_env = EnvironmentModifications() + for item in self.pkg.extendees: + try: + package = self.spec[item].package + package.setup_dependent_package(self.pkg.module, self.spec) + package.setup_dependent_environment(spack_env, env, self.spec) + except: + # The extends was conditional, so it doesn't count here + # eg: extends('python', when='+python') + pass # Package-specific environment modifications - spack_env = EnvironmentModifications() self.spec.package.setup_environment(spack_env, env) # TODO : implement site-specific modifications and filters @@ -203,7 +211,11 @@ def use_name(self): def remove(self): mod_file = self.file_name if os.path.exists(mod_file): - shutil.rmtree(mod_file, ignore_errors=True) + try: + os.remove(mod_file) # Remove the module file + os.removedirs(os.path.dirname(mod_file)) # Remove all the empty directories from the leaf up + except OSError: + pass # removedirs throws OSError on first non-empty directory found class Dotkit(EnvModule): @@ -275,6 +287,6 @@ def write_header(self, module_file): # Long description if self.long_description: module_file.write('proc ModulesHelp { } {\n') - doc = re.sub(r'"', '\"', self.long_description) - module_file.write("puts stderr \"%s\"\n" % doc) + for line in textwrap.wrap(self.long_description, 72): + module_file.write("puts stderr \"%s\"\n" % line) module_file.write('}\n\n') diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 9af32218374cddf18e0f065a953fc239122b7252..4065553131859d19f4a4e8da17dd29b019370cc3 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -335,6 +335,9 @@ def __init__(self, spec): if '.' in self.name: self.name = self.name[self.name.rindex('.') + 1:] + # Allow custom staging paths for packages + self.path=None + # Sanity check attributes required by Spack directives. 
spack.directives.ensure_dicts(type(self)) @@ -445,7 +448,8 @@ def _make_resource_stage(self, root_stage, fetcher, resource): resource_stage_folder = self._resource_stage(resource) resource_mirror = join_path(self.name, os.path.basename(fetcher.url)) stage = ResourceStage(resource.fetcher, root=root_stage, resource=resource, - name=resource_stage_folder, mirror_path=resource_mirror) + name=resource_stage_folder, mirror_path=resource_mirror, + path=self.path) return stage def _make_root_stage(self, fetcher): @@ -455,7 +459,7 @@ def _make_root_stage(self, fetcher): s = self.spec stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash()) # Build the composite stage - stage = Stage(fetcher, mirror_path=mp, name=stage_name) + stage = Stage(fetcher, mirror_path=mp, name=stage_name, path=self.path) return stage def _make_stage(self): @@ -709,7 +713,6 @@ def do_fetch(self, mirror_only=False): if spack.do_checksum and self.version in self.versions: self.stage.check() - def do_stage(self, mirror_only=False): """Unpacks the fetched tarball, then changes into the expanded tarball directory.""" @@ -926,6 +929,9 @@ def build_process(): install(env_path, env_install_path) dump_packages(self.spec, packages_dir) + # Run post install hooks before build stage is removed. + spack.hooks.post_install(self) + # Stop timer. self._total_time = time.time() - start_time build_time = self._total_time - self._fetch_time @@ -954,9 +960,6 @@ def build_process(): # the database, so that we don't need to re-read from file. spack.installed_db.add(self.spec, self.prefix) - # Once everything else is done, run post install hooks - spack.hooks.post_install(self) - def sanity_check_prefix(self): """This function checks whether install succeeded.""" diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index f88f82fc2d66bbf3e01bdaf70ce8fe2408ca8079..d711752c208f936d52a8e10c7799f6771f609c9f 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -89,7 +89,7 @@ class Stage(object): """ def __init__(self, url_or_fetch_strategy, - name=None, mirror_path=None, keep=False): + name=None, mirror_path=None, keep=False, path=None): """Create a stage object. Parameters: url_or_fetch_strategy @@ -135,7 +135,10 @@ def __init__(self, url_or_fetch_strategy, # Try to construct here a temporary name for the stage directory # If this is a named stage, then construct a named path. 
- self.path = join_path(spack.stage_path, self.name) + if path is not None: + self.path = path + else: + self.path = join_path(spack.stage_path, self.name) # Flag to decide whether to delete the stage folder on exit or not self.keep = keep diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index cd842561e6c1f957c32f3a245366f9220dd26698..175a49428c4b223f99814b541e71965bd7585dae 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -67,7 +67,8 @@ 'namespace_trie', 'yaml', 'sbang', - 'environment'] + 'environment', + 'cmd.uninstall'] def list_tests(): diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py index f3f6d4a22e682f25c8b6b60888af734316d89ecb..0b1aeb2a8fe86de5ef05e9e6036079a5c8724c88 100644 --- a/lib/spack/spack/test/cc.py +++ b/lib/spack/spack/test/cc.py @@ -28,6 +28,8 @@ """ import os import unittest +import tempfile +import shutil from llnl.util.filesystem import * import spack @@ -55,13 +57,40 @@ def setUp(self): self.ld = Executable(join_path(spack.build_env_path, "ld")) self.cpp = Executable(join_path(spack.build_env_path, "cpp")) - os.environ['SPACK_CC'] = "/bin/mycc" - os.environ['SPACK_PREFIX'] = "/usr" + self.realcc = "/bin/mycc" + self.prefix = "/spack-test-prefix" + + os.environ['SPACK_CC'] = self.realcc + os.environ['SPACK_PREFIX'] = self.prefix os.environ['SPACK_ENV_PATH']="test" os.environ['SPACK_DEBUG_LOG_DIR'] = "." os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7" os.environ['SPACK_SHORT_SPEC'] = "foo@1.2" + # Make some fake dependencies + self.tmp_deps = tempfile.mkdtemp() + self.dep1 = join_path(self.tmp_deps, 'dep1') + self.dep2 = join_path(self.tmp_deps, 'dep2') + self.dep3 = join_path(self.tmp_deps, 'dep3') + self.dep4 = join_path(self.tmp_deps, 'dep4') + + mkdirp(join_path(self.dep1, 'include')) + mkdirp(join_path(self.dep1, 'lib')) + + mkdirp(join_path(self.dep2, 'lib64')) + + mkdirp(join_path(self.dep3, 'include')) + mkdirp(join_path(self.dep3, 'lib64')) + + mkdirp(join_path(self.dep4, 'include')) + + if 'SPACK_DEPENDENCIES' in os.environ: + del os.environ['SPACK_DEPENDENCIES'] + + + def tearDown(self): + shutil.rmtree(self.tmp_deps, True) + def check_cc(self, command, args, expected): os.environ['SPACK_TEST_COMMAND'] = command @@ -92,6 +121,10 @@ def test_cpp_mode(self): self.check_cpp('dump-mode', [], "cpp") + def test_as_mode(self): + self.check_cc('dump-mode', ['-S'], "as") + + def test_ccld_mode(self): self.check_cc('dump-mode', [], "ccld") self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld") @@ -104,27 +137,85 @@ def test_ld_mode(self): self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld") - def test_includes(self): - self.check_cc('dump-includes', test_command, - "\n".join(["/test/include", "/other/include"])) + def test_dep_rpath(self): + """Ensure RPATHs for root package are added.""" + self.check_cc('dump-args', test_command, + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + ' '.join(test_command)) + + + def test_dep_include(self): + """Ensure a single dependency include directory is added.""" + os.environ['SPACK_DEPENDENCIES'] = self.dep4 + self.check_cc('dump-args', test_command, + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + '-I' + self.dep4 + '/include ' + + ' '.join(test_command)) + + + def test_dep_lib(self): + """Ensure a single dependency RPATH is added.""" + os.environ['SPACK_DEPENDENCIES'] = 
self.dep2 + self.check_cc('dump-args', test_command, + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + '-L' + self.dep2 + '/lib64 ' + + '-Wl,-rpath,' + self.dep2 + '/lib64 ' + + ' '.join(test_command)) + + + def test_all_deps(self): + """Ensure includes and RPATHs for all deps are added. """ + os.environ['SPACK_DEPENDENCIES'] = ':'.join([ + self.dep1, self.dep2, self.dep3, self.dep4]) + + # This is probably more constrained than it needs to be; it + # checks order within prepended args and doesn't strictly have + # to. We could loosen that if it becomes necessary + self.check_cc('dump-args', test_command, + self.realcc + ' ' + + '-Wl,-rpath,' + self.prefix + '/lib ' + + '-Wl,-rpath,' + self.prefix + '/lib64 ' + + + '-I' + self.dep4 + '/include ' + + + '-L' + self.dep3 + '/lib64 ' + + '-Wl,-rpath,' + self.dep3 + '/lib64 ' + + '-I' + self.dep3 + '/include ' + + + '-L' + self.dep2 + '/lib64 ' + + '-Wl,-rpath,' + self.dep2 + '/lib64 ' + + + '-L' + self.dep1 + '/lib ' + + '-Wl,-rpath,' + self.dep1 + '/lib ' + + '-I' + self.dep1 + '/include ' + + + ' '.join(test_command)) - def test_libraries(self): - self.check_cc('dump-libraries', test_command, - "\n".join(["/test/lib", "/other/lib"])) + def test_ld_deps(self): + """Ensure no (extra) -I args or -Wl, are passed in ld mode.""" + os.environ['SPACK_DEPENDENCIES'] = ':'.join([ + self.dep1, self.dep2, self.dep3, self.dep4]) + self.check_ld('dump-args', test_command, + 'ld ' + + '-rpath ' + self.prefix + '/lib ' + + '-rpath ' + self.prefix + '/lib64 ' + - def test_libs(self): - self.check_cc('dump-libs', test_command, - "\n".join(["lib1", "lib2", "lib3", "lib4"])) + '-L' + self.dep3 + '/lib64 ' + + '-rpath ' + self.dep3 + '/lib64 ' + + '-L' + self.dep2 + '/lib64 ' + + '-rpath ' + self.dep2 + '/lib64 ' + - def test_rpaths(self): - self.check_cc('dump-rpaths', test_command, - "\n".join(["/first/rpath", "/second/rpath", "/third/rpath", "/fourth/rpath"])) + '-L' + self.dep1 + '/lib ' + + '-rpath ' + self.dep1 + '/lib ' + + ' '.join(test_command)) - def test_other_args(self): - self.check_cc('dump-other-args', test_command, - "\n".join(["arg1", "-Wl,--start-group", "arg2", "arg3", "arg4", - "-Wl,--end-group", "arg5", "arg6"])) diff --git a/lib/spack/spack/test/cmd/__init__.py b/lib/spack/spack/test/cmd/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py new file mode 100644 index 0000000000000000000000000000000000000000..80efe06d36efa6662ac5c037d25e67cd6cbbf189 --- /dev/null +++ b/lib/spack/spack/test/cmd/uninstall.py @@ -0,0 +1,37 @@ +import spack.test.mock_database + +from spack.cmd.uninstall import uninstall + + +class MockArgs(object): + def __init__(self, packages, all=False, force=False, dependents=False): + self.packages = packages + self.all = all + self.force = force + self.dependents = dependents + self.yes_to_all = True + + +class TestUninstall(spack.test.mock_database.MockDatabase): + def test_uninstall(self): + parser = None + # Multiple matches + args = MockArgs(['mpileaks']) + self.assertRaises(SystemExit, uninstall, parser, args) + # Installed dependents + args = MockArgs(['libelf']) + self.assertRaises(SystemExit, uninstall, parser, args) + # Recursive uninstall + args = MockArgs(['callpath'], all=True, dependents=True) + uninstall(parser, args) + + all_specs = spack.install_layout.all_specs() + 
self.assertEqual(len(all_specs), 7) + # query specs with multiple configurations + mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')] + callpath_specs = [s for s in all_specs if s.satisfies('callpath')] + mpi_specs = [s for s in all_specs if s.satisfies('mpi')] + + self.assertEqual(len(mpileaks_specs), 0) + self.assertEqual(len(callpath_specs), 0) + self.assertEqual(len(mpi_specs), 3) diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index 08cce0967479163fb324b870c26dd4c54618a7ee..9cd8c969aed4e534c4b9dc930a44ea006c414d6e 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -24,6 +24,7 @@ ############################################################################## import spack from spack.spec import Spec, CompilerSpec +from spack.version import ver from spack.concretize import find_spec from spack.test.mock_packages_test import * @@ -77,6 +78,14 @@ def test_concretize_variant(self): self.check_concretize('mpich') + def test_concretize_preferred_version(self): + spec = self.check_concretize('python') + self.assertEqual(spec.versions, ver('2.7.11')) + + spec = self.check_concretize('python@3.5.1') + self.assertEqual(spec.versions, ver('3.5.1')) + + def test_concretize_with_virtual(self): self.check_concretize('mpileaks ^mpi') self.check_concretize('mpileaks ^mpi@:1.1') diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index ce6e8a0552ba82f3f4182874f8b3a86ae7f5f9e0..465263d057465d57667c11f4cfd425ef0a1157c1 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -28,16 +28,12 @@ """ import os.path import multiprocessing -import shutil -import tempfile import spack from llnl.util.filesystem import join_path from llnl.util.lock import * from llnl.util.tty.colify import colify -from spack.database import Database -from spack.directory_layout import YamlDirectoryLayout -from spack.test.mock_packages_test import * +from spack.test.mock_database import MockDatabase def _print_ref_counts(): @@ -75,80 +71,7 @@ def add_rec(spec): colify(recs, cols=3) -class DatabaseTest(MockPackagesTest): - - def _mock_install(self, spec): - s = Spec(spec) - s.concretize() - pkg = spack.repo.get(s) - pkg.do_install(fake=True) - - - def _mock_remove(self, spec): - specs = spack.installed_db.query(spec) - assert(len(specs) == 1) - spec = specs[0] - spec.package.do_uninstall(spec) - - - def setUp(self): - super(DatabaseTest, self).setUp() - # - # TODO: make the mockup below easier. - # - - # Make a fake install directory - self.install_path = tempfile.mkdtemp() - self.spack_install_path = spack.install_path - spack.install_path = self.install_path - - self.install_layout = YamlDirectoryLayout(self.install_path) - self.spack_install_layout = spack.install_layout - spack.install_layout = self.install_layout - - # Make fake database and fake install directory. - self.installed_db = Database(self.install_path) - self.spack_installed_db = spack.installed_db - spack.installed_db = self.installed_db - - # make a mock database with some packages installed note that - # the ref count for dyninst here will be 3, as it's recycled - # across each install. 
- # - # Here is what the mock DB looks like: - # - # o mpileaks o mpileaks' o mpileaks'' - # |\ |\ |\ - # | o callpath | o callpath' | o callpath'' - # |/| |/| |/| - # o | mpich o | mpich2 o | zmpi - # | | o | fake - # | | | - # | |______________/ - # | .____________/ - # |/ - # o dyninst - # |\ - # | o libdwarf - # |/ - # o libelf - # - - # Transaction used to avoid repeated writes. - with spack.installed_db.write_transaction(): - self._mock_install('mpileaks ^mpich') - self._mock_install('mpileaks ^mpich2') - self._mock_install('mpileaks ^zmpi') - - - def tearDown(self): - super(DatabaseTest, self).tearDown() - shutil.rmtree(self.install_path) - spack.install_path = self.spack_install_path - spack.install_layout = self.spack_install_layout - spack.installed_db = self.spack_installed_db - - +class DatabaseTest(MockDatabase): def test_005_db_exists(self): """Make sure db cache file exists after creating.""" index_file = join_path(self.install_path, '.spack-db', 'index.yaml') @@ -157,7 +80,6 @@ def test_005_db_exists(self): self.assertTrue(os.path.exists(index_file)) self.assertTrue(os.path.exists(lock_file)) - def test_010_all_install_sanity(self): """Ensure that the install layout reflects what we think it does.""" all_specs = spack.install_layout.all_specs() diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 8297893f012e6bf8bb7f9bee773d5c6bb9f9e314..fc5b7e67dfe5f7d4abb65fe4e795db51f9778e87 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -64,7 +64,14 @@ def tearDown(self): shutil.rmtree(self.tmpdir, ignore_errors=True) - def test_install_and_uninstall(self): + def fake_fetchify(self, pkg): + """Fake the URL for a package so it downloads from a file.""" + fetcher = FetchStrategyComposite() + fetcher.append(URLFetchStrategy(self.repo.url)) + pkg.fetcher = fetcher + + + def ztest_install_and_uninstall(self): # Get a basic concrete spec for the trivial install package. spec = Spec('trivial_install_test_package') spec.concretize() @@ -73,11 +80,7 @@ def test_install_and_uninstall(self): # Get the package pkg = spack.repo.get(spec) - # Fake the URL for the package so it downloads from a file. 
- - fetcher = FetchStrategyComposite() - fetcher.append(URLFetchStrategy(self.repo.url)) - pkg.fetcher = fetcher + self.fake_fetchify(pkg) try: pkg.do_install() @@ -85,3 +88,17 @@ def test_install_and_uninstall(self): except Exception, e: pkg.remove_prefix() raise + + + def test_install_environment(self): + spec = Spec('cmake-client').concretized() + + for s in spec.traverse(): + self.fake_fetchify(s.package) + + pkg = spec.package + try: + pkg.do_install() + except Exception, e: + pkg.remove_prefix() + raise diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py new file mode 100644 index 0000000000000000000000000000000000000000..82ba59fc4834c6a5194b88bbcf59729d9e0ee103 --- /dev/null +++ b/lib/spack/spack/test/mock_database.py @@ -0,0 +1,80 @@ +import shutil +import tempfile + +import spack +from spack.spec import Spec +from spack.database import Database +from spack.directory_layout import YamlDirectoryLayout +from spack.test.mock_packages_test import MockPackagesTest + + +class MockDatabase(MockPackagesTest): + def _mock_install(self, spec): + s = Spec(spec) + s.concretize() + pkg = spack.repo.get(s) + pkg.do_install(fake=True) + + def _mock_remove(self, spec): + specs = spack.installed_db.query(spec) + assert len(specs) == 1 + spec = specs[0] + spec.package.do_uninstall(spec) + + def setUp(self): + super(MockDatabase, self).setUp() + # + # TODO: make the mockup below easier. + # + + # Make a fake install directory + self.install_path = tempfile.mkdtemp() + self.spack_install_path = spack.install_path + spack.install_path = self.install_path + + self.install_layout = YamlDirectoryLayout(self.install_path) + self.spack_install_layout = spack.install_layout + spack.install_layout = self.install_layout + + # Make fake database and fake install directory. + self.installed_db = Database(self.install_path) + self.spack_installed_db = spack.installed_db + spack.installed_db = self.installed_db + + # make a mock database with some packages installed note that + # the ref count for dyninst here will be 3, as it's recycled + # across each install. + # + # Here is what the mock DB looks like: + # + # o mpileaks o mpileaks' o mpileaks'' + # |\ |\ |\ + # | o callpath | o callpath' | o callpath'' + # |/| |/| |/| + # o | mpich o | mpich2 o | zmpi + # | | o | fake + # | | | + # | |______________/ + # | .____________/ + # |/ + # o dyninst + # |\ + # | o libdwarf + # |/ + # o libelf + # + + # Transaction used to avoid repeated writes. + with spack.installed_db.write_transaction(): + self._mock_install('mpileaks ^mpich') + self._mock_install('mpileaks ^mpich2') + self._mock_install('mpileaks ^zmpi') + + def tearDown(self): + for spec in spack.installed_db.query(): + spec.package.do_uninstall(spec) + super(MockDatabase, self).tearDown() + shutil.rmtree(self.install_path) + spack.install_path = self.spack_install_path + spack.install_layout = self.spack_install_layout + spack.installed_db = self.spack_installed_db diff --git a/var/spack/repos/builtin.mock/packages/cmake-client/package.py b/var/spack/repos/builtin.mock/packages/cmake-client/package.py new file mode 100644 index 0000000000000000000000000000000000000000..a5d3ef156a6f30a9ebf0d2c968067b0c4fa41390 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/cmake-client/package.py @@ -0,0 +1,89 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. 
+# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +import os + +def check(condition, msg): + """Raise an install error if condition is False.""" + if not condition: + raise InstallError(msg) + + +class CmakeClient(Package): + """A dumy package that uses cmake.""" + homepage = 'https://www.example.com' + url = 'https://www.example.com/cmake-client-1.0.tar.gz' + + version('1.0', '4cb3ff35b2472aae70f542116d616e63') + + depends_on('cmake') + + + def setup_environment(self, spack_env, run_env): + spack_cc # Ensure spack module-scope variable is avaiabl + check(from_cmake == "from_cmake", + "setup_environment couldn't read global set by cmake.") + + check(self.spec['cmake'].link_arg == "test link arg", + "link arg on dependency spec not readable from setup_environment.") + + + def setup_dependent_environment(self, spack_env, run_env, dspec): + spack_cc # Ensure spack module-scope variable is avaiable + check(from_cmake == "from_cmake", + "setup_dependent_environment couldn't read global set by cmake.") + + check(self.spec['cmake'].link_arg == "test link arg", + "link arg on dependency spec not readable from setup_dependent_environment.") + + + def setup_dependent_package(self, module, dspec): + spack_cc # Ensure spack module-scope variable is avaiable + check(from_cmake == "from_cmake", + "setup_dependent_package couldn't read global set by cmake.") + + check(self.spec['cmake'].link_arg == "test link arg", + "link arg on dependency spec not readable from setup_dependent_package.") + + + + def install(self, spec, prefix): + # check that cmake is in the global scope. + global cmake + check(cmake is not None, "No cmake was in environment!") + + # check that which('cmake') returns the right one. 
+        cmake = which('cmake')
+        check(cmake.exe[0].startswith(spec['cmake'].prefix.bin),
+              "Wrong cmake was in environment: %s" % cmake)
+
+        check(from_cmake == "from_cmake",
+              "Couldn't read global set by cmake.")
+
+        check(os.environ['from_cmake'] == 'from_cmake',
+              "Couldn't read env var set in environment by dependency")
+
+        mkdirp(prefix.bin)
+        touch(join_path(prefix.bin, 'dummy'))
diff --git a/var/spack/repos/builtin.mock/packages/cmake/package.py b/var/spack/repos/builtin.mock/packages/cmake/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..deb44c2bf767387320efc87670949feadfc4371b
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/cmake/package.py
@@ -0,0 +1,69 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+def check(condition, msg):
+    """Raise an install error if condition is False."""
+    if not condition:
+        raise InstallError(msg)
+
+
+class Cmake(Package):
+    """A dummy package for the cmake build system."""
+    homepage = 'https://www.cmake.org'
+    url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
+
+    version('3.4.3', '4cb3ff35b2472aae70f542116d616e63',
+            url='https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz')
+
+
+    def setup_environment(self, spack_env, run_env):
+        spack_cc # Ensure spack module-scope variable is available
+        spack_env.set('for_install', 'for_install')
+
+    def setup_dependent_environment(self, spack_env, run_env, dspec):
+        spack_cc # Ensure spack module-scope variable is available
+        spack_env.set('from_cmake', 'from_cmake')
+
+
+    def setup_dependent_package(self, module, dspec):
+        spack_cc # Ensure spack module-scope variable is available
+
+        self.spec.from_cmake = "from_cmake"
+        module.from_cmake = "from_cmake"
+
+        self.spec.link_arg = "test link arg"
+
+
+    def install(self, spec, prefix):
+        mkdirp(prefix.bin)
+
+        check(os.environ['for_install'] == 'for_install',
+              "Couldn't read env var set in compile environment")
+
+        cmake_exe = join_path(prefix.bin, 'cmake')
+        touch(cmake_exe)
+        set_executable(cmake_exe)
diff --git a/var/spack/repos/builtin.mock/packages/python/package.py b/var/spack/repos/builtin.mock/packages/python/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5fed52f5347801768ab2523e1faf169f00a4593
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/python/package.py
@@ -0,0 +1,43 @@
+############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Python(Package): + """Dummy Python package to demonstrate preferred versions.""" + homepage = "http://www.python.org" + url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz" + + extendable = True + + version('3.5.1', 'be78e48cdfc1a7ad90efff146dce6cfe') + version('3.5.0', 'a56c0c0b45d75a0ec9c6dee933c41c36') + version('2.7.11', '6b6076ec9e93f05dd63e47eb9c15728b', preferred=True) + version('2.7.10', 'd7547558fd673bd9d38e2108c6b42521') + version('2.7.9', '5eebcaa0030dc4061156d3429657fb83') + version('2.7.8', 'd4bca0159acb0b44a781292b5231936f') + + def install(self, spec, prefix): + pass + diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py new file mode 100644 index 0000000000000000000000000000000000000000..8f19c84d225ff988bca89532bcab3dbf2a815439 --- /dev/null +++ b/var/spack/repos/builtin/packages/apr-util/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class AprUtil(Package): + """Apache Portable Runtime Utility""" + homepage = 'https://apr.apache.org/' + url = 'http://archive.apache.org/dist/apr/apr-util-1.5.4.tar.gz' + + version('1.5.4', '866825c04da827c6e5f53daff5569f42') + + depends_on('apr') + + def install(self, spec, prefix): + + # configure, build, install: + options = ['--prefix=%s' % prefix] + options.append('--with-apr=%s' % spec['apr'].prefix) + + configure(*options) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/apr/package.py b/var/spack/repos/builtin/packages/apr/package.py new file mode 100644 index 0000000000000000000000000000000000000000..8a440766ece79fbde51db7171a6dfe0b46df6837 --- /dev/null +++ b/var/spack/repos/builtin/packages/apr/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Apr(Package): + """Apache portable runtime.""" + homepage = 'https://apr.apache.org/' + url = 'http://archive.apache.org/dist/apr/apr-1.5.2.tar.gz' + + version('1.5.2', '98492e965963f852ab29f9e61b2ad700') + + def install(self, spec, prefix): + options = ['--prefix=%s' % prefix] + configure(*options) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py index 614071cf534c84a1573e31bfebfa0a9b48365389..6b152f786311decc2206e2dd657260b18cb88873 100644 --- a/var/spack/repos/builtin/packages/arpack-ng/package.py +++ b/var/spack/repos/builtin/packages/arpack-ng/package.py @@ -41,16 +41,26 @@ class ArpackNg(Package): depends_on('blas') depends_on('lapack') + depends_on('automake') + depends_on('autoconf') + depends_on('libtool@2.4.2:') + depends_on('mpi', when='+mpi') def install(self, spec, prefix): # Apparently autotools are not bootstrapped + # TODO: switch to use the CMake build in the next version + # rather than bootstrapping. 
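+        # libtoolize refreshes the libtool support files from the libtool
+        # dependency declared above before ./bootstrap regenerates the rest
+        # of the build system.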
+ which('libtoolize')() bootstrap = Executable('./bootstrap') options = ['--prefix=%s' % prefix] if '+mpi' in spec: - options.append('--enable-mpi') + options.extend([ + '--enable-mpi', + 'F77=mpif77' #FIXME: avoid hardcoding MPI wrapper names + ]) if '~shared' in spec: options.append('--enable-shared=no') diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py new file mode 100644 index 0000000000000000000000000000000000000000..7260fd74a1d6f156652263ba8b02f7f32e47c2f9 --- /dev/null +++ b/var/spack/repos/builtin/packages/astyle/package.py @@ -0,0 +1,17 @@ +from spack import * +import os + +class Astyle(Package): + """A Free, Fast, and Small Automatic Formatter for C, C++, C++/CLI, Objective-C, C#, and Java Source Code.""" + homepage = "http://astyle.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/astyle/astyle/astyle%202.04/astyle_2.04_linux.tar.gz" + + version('2.04', '30b1193a758b0909d06e7ee8dd9627f6') + + def install(self, spec, prefix): + + with working_dir('src'): + make('-f', + join_path(self.stage.source_path,'build','clang','Makefile'), + parallel=False) + install(join_path(self.stage.source_path, 'src','bin','astyle'), self.prefix.bin) diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index fc683363a70dc6217fd98b64271dce26e5d0a223..b5504122b7440f6621f870f71a4a3303972813b6 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -1,31 +1,36 @@ from spack import * from spack.util.executable import Executable -import os +import os.path class Atlas(Package): """ - Automatically Tuned Linear Algebra Software, generic shared - ATLAS is an approach for the automatic generation and optimization of - numerical software. Currently ATLAS supplies optimized versions for the - complete set of linear algebra kernels known as the Basic Linear Algebra - Subroutines (BLAS), and a subset of the linear algebra routines in the - LAPACK library. + Automatically Tuned Linear Algebra Software, generic shared ATLAS is an approach for the automatic generation and + optimization of numerical software. Currently ATLAS supplies optimized versions for the complete set of linear + algebra kernels known as the Basic Linear Algebra Subroutines (BLAS), and a subset of the linear algebra routines + in the LAPACK library. """ homepage = "http://math-atlas.sourceforge.net/" + version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', + url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True) + resource(name='lapack', + url='http://www.netlib.org/lapack/lapack-3.5.0.tgz', + md5='b1d3e3e425b2e44a06760ff173104bdf', + destination='spack-resource-lapack', + when='@3:') + version('3.11.34', '0b6c5389c095c4c8785fd0f724ec6825', url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2/download') - version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', - url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2') - # TODO: make this provide BLAS once it works better. Create a way - # TODO: to mark "beta" packages and require explicit invocation. + variant('shared', default=True, description='Builds shared library') - # provides('blas') + provides('blas') + provides('lapack') + parallel = False def patch(self): - # Disable thraed check. 
LLNL's environment does not allow + # Disable thread check. LLNL's environment does not allow # disabling of CPU throttling in a way that ATLAS actually # understands. filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);', @@ -33,26 +38,21 @@ def patch(self): # TODO: investigate a better way to add the check back in # TODO: using, say, MSRs. Or move this to a variant. - @when('@:3.10') def install(self, spec, prefix): - with working_dir('ATLAS-Build', create=True): - configure = Executable('../configure') - configure('--prefix=%s' % prefix, '-C', 'ic', 'cc', '-C', 'if', 'f77', "--dylibs") - make() - make('check') - make('ptcheck') - make('time') - make("install") + options = [] + if '+shared' in spec: + options.append('--shared') - def install(self, spec, prefix): - with working_dir('ATLAS-Build', create=True): - configure = Executable('../configure') - configure('--incdir=%s' % prefix.include, - '--libdir=%s' % prefix.lib, - '--cc=cc', - "--shared") + # Lapack resource + lapack_stage = self.stage[1] + lapack_tarfile = os.path.basename(lapack_stage.fetcher.url) + lapack_tarfile_path = join_path(lapack_stage.path, lapack_tarfile) + options.append('--with-netlib-lapack-tarfile=%s' % lapack_tarfile_path) + with working_dir('spack-build', create=True): + configure = Executable('../configure') + configure('--prefix=%s' % prefix, *options) make() make('check') make('ptcheck') diff --git a/var/spack/repos/builtin/packages/bash/package.py b/var/spack/repos/builtin/packages/bash/package.py new file mode 100644 index 0000000000000000000000000000000000000000..9c9fbeedcf84fedc10da45116703aafce6bed3f5 --- /dev/null +++ b/var/spack/repos/builtin/packages/bash/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Bash(Package): + """The GNU Project's Bourne Again SHell.""" + + homepage = "https://www.gnu.org/software/bash/" + url = "ftp://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz" + + version('4.3', '81348932d5da294953e15d4814c74dd1') + + depends_on('readline') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--with-curses', + '--with-installed-readline=%s' % spec['readline'].prefix) + + make() + make("tests") + make("install") diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py index 897539a439100a03e63d489de32f6465e372184c..b8064093d27b61c3bb343dd7d1653d83f950650f 100644 --- a/var/spack/repos/builtin/packages/binutils/package.py +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -12,6 +12,10 @@ class Binutils(Package): version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e') version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764') + depends_on('m4') + depends_on('flex') + depends_on('bison') + # Add a patch that creates binutils libiberty_pic.a which is preferred by OpenSpeedShop and cbtf-krell variant('krellpatch', default=False, description="build with openspeedshop based patch.") variant('gold', default=True, description="build the gold linker") diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index fb1f5daee7f371e33d0f69a7a758e93c5bd63f0d..12bc9508c319a70cf47771dd56cd9b848f32933b 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -1,5 +1,9 @@ from spack import * import spack +import sys + +import os +import sys class Boost(Package): """Boost provides free peer-reviewed portable C++ source @@ -45,34 +49,34 @@ class Boost(Package): version('1.34.1', 
'2d938467e8a448a2c9763e0a9f8ca7e5') version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0') - default_install_libs = set(['atomic', - 'chrono', - 'date_time', - 'filesystem', + default_install_libs = set(['atomic', + 'chrono', + 'date_time', + 'filesystem', 'graph', 'iostreams', 'locale', 'log', - 'math', + 'math', 'program_options', - 'random', - 'regex', - 'serialization', - 'signals', - 'system', - 'test', - 'thread', + 'random', + 'regex', + 'serialization', + 'signals', + 'system', + 'test', + 'thread', 'wave']) - # mpi/python are not installed by default because they pull in many - # dependencies and/or because there is a great deal of customization + # mpi/python are not installed by default because they pull in many + # dependencies and/or because there is a great deal of customization # possible (and it would be difficult to choose sensible defaults) default_noinstall_libs = set(['mpi', 'python']) all_libs = default_install_libs | default_noinstall_libs for lib in all_libs: - variant(lib, default=(lib not in default_noinstall_libs), + variant(lib, default=(lib not in default_noinstall_libs), description="Compile with {0} library".format(lib)) variant('debug', default=False, description='Switch to the debug version of Boost') @@ -124,9 +128,9 @@ def determine_bootstrap_options(self, spec, withLibs, options): with open('user-config.jam', 'w') as f: compiler_wrapper = join_path(spack.build_env_path, 'c++') - f.write("using {0} : : {1} ;\n".format(boostToolsetId, + f.write("using {0} : : {1} ;\n".format(boostToolsetId, compiler_wrapper)) - + if '+mpi' in spec: f.write('using mpi : %s ;\n' % join_path(spec['mpi'].prefix.bin, 'mpicxx')) @@ -155,7 +159,7 @@ def determine_b2_options(self, spec, options): linkTypes = ['static'] if '+shared' in spec: linkTypes.append('shared') - + threadingOpts = [] if '+multithreaded' in spec: threadingOpts.append('multi') @@ -163,28 +167,50 @@ def determine_b2_options(self, spec, options): threadingOpts.append('single') if not threadingOpts: raise RuntimeError("At least one of {singlethreaded, multithreaded} must be enabled") - + options.extend([ 'toolset=%s' % self.determine_toolset(spec), 'link=%s' % ','.join(linkTypes), '--layout=tagged']) - + return threadingOpts def install(self, spec, prefix): + # On Darwin, Boost expects the Darwin libtool. However, one of the + # dependencies may have pulled in Spack's GNU libtool, and these two are + # not compatible. We thus create a symlink to Darwin's libtool and add + # it at the beginning of PATH. + if sys.platform == 'darwin': + newdir = os.path.abspath('darwin-libtool') + mkdirp(newdir) + force_symlink('/usr/bin/libtool', join_path(newdir, 'libtool')) + env['PATH'] = newdir + ':' + env['PATH'] + withLibs = list() for lib in Boost.all_libs: if "+{0}".format(lib) in spec: withLibs.append(lib) if not withLibs: - # if no libraries are specified for compilation, then you dont have + # if no libraries are specified for compilation, then you dont have # to configure/build anything, just copy over to the prefix directory. 
src = join_path(self.stage.source_path, 'boost') mkdirp(join_path(prefix, 'include')) dst = join_path(prefix, 'include', 'boost') install_tree(src, dst) return - + + # Remove libraries that the release version does not support + if not spec.satisfies('@1.54.0:'): + withLibs.remove('log') + if not spec.satisfies('@1.53.0:'): + withLibs.remove('atomic') + if not spec.satisfies('@1.48.0:'): + withLibs.remove('locale') + if not spec.satisfies('@1.47.0:'): + withLibs.remove('chrono') + if not spec.satisfies('@1.43.0:'): + withLibs.remove('random') + # to make Boost find the user-config.jam env['BOOST_BUILD_PATH'] = './' @@ -207,4 +233,7 @@ def install(self, spec, prefix): # Boost.MPI if the threading options are not separated. for threadingOpt in threadingOpts: b2('install', 'threading=%s' % threadingOpt, *b2_options) - + + # The shared libraries are not installed correctly on Darwin; correct this + if (sys.platform == 'darwin') and ('+shared' in spec): + fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index cc93c7067c3a05d63086f2f52aaf8e55555941d0..91a4e3b415f75efbc6181cf63e3ca3f5f9835480 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -30,6 +30,7 @@ class Cmake(Package): homepage = 'https://www.cmake.org' url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz' + version('3.5.1', 'ca051f4a66375c89d1a524e726da0296') version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e') version('3.4.3', '4cb3ff35b2472aae70f542116d616e63') version('3.4.0', 'cd3034e0a44256a0917e254167217fc8') @@ -38,10 +39,12 @@ class Cmake(Package): version('2.8.10.2', '097278785da7182ec0aea8769d06860c') variant('ncurses', default=True, description='Enables the build of the ncurses gui') + variant('openssl', default=True, description="Enables CMake's OpenSSL features") variant('qt', default=False, description='Enables the build of cmake-gui') variant('doc', default=False, description='Enables the generation of html and man page documentation') depends_on('ncurses', when='+ncurses') + depends_on('openssl', when='+openssl') depends_on('qt', when='+qt') depends_on('python@2.7.11:', when='+doc') depends_on('py-sphinx', when='+doc') @@ -77,8 +80,9 @@ def install(self, spec, prefix): options.append('--sphinx-html') options.append('--sphinx-man') - options.append('--') - options.append('-DCMAKE_USE_OPENSSL=ON') + if '+openssl' in spec: + options.append('--') + options.append('-DCMAKE_USE_OPENSSL=ON') configure(*options) make() diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py index 1693c4b160f83d587cfd95b781d146e5d9704869..bc83cb2b651aaa953160bb7c1ce7b833a4a3a84d 100644 --- a/var/spack/repos/builtin/packages/cryptopp/package.py +++ b/var/spack/repos/builtin/packages/cryptopp/package.py @@ -8,8 +8,8 @@ class Cryptopp(Package): public-key encryption (RSA, DSA), and a few obsolete/historical encryption algorithms (MD5, Panama).""" - homepage = "http://www.cryptopp.com/" - url = "http://www.cryptopp.com/cryptopp563.zip" + homepage = "http://www.cryptopp.com" + base_url = "http://www.cryptopp.com" version('5.6.3', '3c5b70e2ec98b7a24988734446242d07') version('5.6.2', '7ed022585698df48e65ce9218f6c6a67') @@ -25,7 +25,5 @@ def install(self, spec, prefix): install('libcryptopp.a', prefix.lib) def url_for_version(self, version): - version_tuple = tuple(v for v in iter(version)) - version_string = 
reduce(lambda vs, nv: vs + str(nv), version_tuple, "") - - return "%scryptopp%s.zip" % (Cryptopp.homepage, version_string) + version_string = str(version).replace('.', '') + return '%s/cryptopp%s.zip' % (Cryptopp.base_url, version_string) diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py new file mode 100644 index 0000000000000000000000000000000000000000..ea083d8651d822ef0a7b8e6e0d7352496999319b --- /dev/null +++ b/var/spack/repos/builtin/packages/cuda/package.py @@ -0,0 +1,47 @@ +from spack import * +from glob import glob +import os + +class Cuda(Package): + """CUDA is a parallel computing platform and programming model invented by + NVIDIA. It enables dramatic increases in computing performance by harnessing + the power of the graphics processing unit (GPU). + + Note: NVIDIA does not provide a download URL for CUDA so you will need to + download it yourself. Go to https://developer.nvidia.com/cuda-downloads + and select your Operating System, Architecture, Distribution, and Version. + For the Installer Type, select runfile and click Download. Spack will search + your current directory for this file. Alternatively, add this file to a + mirror so that Spack can find it. For instructions on how to set up a mirror, + see http://software.llnl.gov/spack/mirrors.html + + Note: This package does not currently install the drivers necessary to run + CUDA. These will need to be installed manually. See: + http://docs.nvidia.com/cuda/cuda-getting-started-guide-for-linux for details.""" + + homepage = "http://www.nvidia.com/object/cuda_home_new.html" + + version('7.5.18', '4b3bcecf0dfc35928a0898793cf3e4c6', expand=False, + url="file://%s/cuda_7.5.18_linux.run" % os.getcwd()) + version('6.5.14', '90b1b8f77313600cc294d9271741f4da', expand=False, + url="file://%s/cuda_6.5.14_linux_64.run" % os.getcwd()) + + + def install(self, spec, prefix): + runfile = glob(os.path.join(self.stage.path, 'cuda*.run'))[0] + chmod = which('chmod') + chmod('+x', runfile) + runfile = which(runfile) + + # Note: NVIDIA does not officially support many newer versions of compilers. + # For example, on CentOS 6, you must use GCC 4.4.7 or older. See: + # http://docs.nvidia.com/cuda/cuda-installation-guide-linux/#system-requirements + # for details. 
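+        # The downloaded .run file is made executable and invoked directly;
+        # only the toolkit component is installed, into the Spack prefix
+        # (no driver or samples, as noted in the docstring above).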
+ + runfile( + '--silent', # disable interactive prompts + '--verbose', # create verbose log file + '--toolkit', # install CUDA Toolkit + '--toolkitpath=%s' % prefix + ) + diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py index 294b0de54ea84fc0306b14f8cc2ec8a365de3c35..74ce8ef502d4614f26d582e9f8f3b64f02969899 100644 --- a/var/spack/repos/builtin/packages/dbus/package.py +++ b/var/spack/repos/builtin/packages/dbus/package.py @@ -13,12 +13,15 @@ class Dbus(Package): homepage = "http://dbus.freedesktop.org/" url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz" + version('1.11.2', '957a07f066f3730d2bb3ea0932f0081b') version('1.9.0', 'ec6895a4d5c0637b01f0d0e7689e2b36') version('1.8.8', 'b9f4a18ee3faa1e07c04aa1d83239c43') version('1.8.6', '6a08ba555d340e9dfe2d623b83c0eea8') version('1.8.4', '4717cb8ab5b80978fcadf2b4f2f72e1b') version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068') + depends_on('expat') + def install(self, spec, prefix): configure( "--prefix=%s" % prefix, diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py new file mode 100644 index 0000000000000000000000000000000000000000..b251d50ca1df2d2694966d3acc56af5adae7d71d --- /dev/null +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -0,0 +1,253 @@ +from spack import * +import sys + +class Dealii(Package): + """C++ software library providing well-documented tools to build finite element codes for a broad variety of PDEs.""" + homepage = "https://www.dealii.org" + url = "https://github.com/dealii/dealii/releases/download/v8.4.0/dealii-8.4.0.tar.gz" + + version('8.4.0', 'ac5dbf676096ff61e092ce98c80c2b00') + version('dev', git='https://github.com/dealii/dealii.git') + + variant('mpi', default=True, description='Compile with MPI') + variant('arpack', default=True, description='Compile with Arpack and PArpack (only with MPI)') + variant('doc', default=False, description='Compile with documentation') + variant('hdf5', default=True, description='Compile with HDF5 (only with MPI)') + variant('metis', default=True, description='Compile with Metis') + variant('netcdf', default=True, description='Compile with Netcdf (only with MPI)') + variant('oce', default=True, description='Compile with OCE') + variant('p4est', default=True, description='Compile with P4est (only with MPI)') + variant('petsc', default=True, description='Compile with Petsc (only with MPI)') + variant('slepc', default=True, description='Compile with Slepc (only with Petsc and MPI)') + variant('trilinos', default=True, description='Compile with Trilinos (only with MPI)') + + # required dependencies, light version + depends_on ("blas") + # Boost 1.58 is blacklisted, see https://github.com/dealii/dealii/issues/1591 + # require at least 1.59 + depends_on ("boost@1.59.0:", when='~mpi') + depends_on ("boost@1.59.0:+mpi", when='+mpi') + depends_on ("bzip2") + depends_on ("cmake") + depends_on ("lapack") + depends_on ("muparser") + depends_on ("suite-sparse") + depends_on ("tbb") + depends_on ("zlib") + + # optional dependencies + depends_on ("mpi", when="+mpi") + depends_on ("arpack-ng+mpi", when='+arpack+mpi') + depends_on ("doxygen", when='+doc') + depends_on ("hdf5+mpi~cxx", when='+hdf5+mpi') #FIXME NetCDF declares dependency with ~cxx, why? 
+ depends_on ("metis@5:", when='+metis') + depends_on ("netcdf+mpi", when="+netcdf+mpi") + depends_on ("netcdf-cxx", when='+netcdf+mpi') + depends_on ("oce", when='+oce') + depends_on ("p4est", when='+p4est+mpi') + depends_on ("petsc+mpi", when='+petsc+mpi') + depends_on ("slepc", when='+slepc+petsc+mpi') + depends_on ("trilinos", when='+trilinos+mpi') + + # developer dependnecies + #depends_on ("numdiff") #FIXME + #depends_on ("astyle") #FIXME + + def install(self, spec, prefix): + options = [] + options.extend(std_cmake_args) + + # CMAKE_BUILD_TYPE should be DebugRelease | Debug | Release + for word in options[:]: + if word.startswith('-DCMAKE_BUILD_TYPE'): + options.remove(word) + + dsuf = 'dylib' if sys.platform == 'darwin' else 'so' + options.extend([ + '-DCMAKE_BUILD_TYPE=DebugRelease', + '-DDEAL_II_COMPONENT_EXAMPLES=ON', + '-DDEAL_II_WITH_THREADS:BOOL=ON', + '-DBOOST_DIR=%s' % spec['boost'].prefix, + '-DBZIP2_DIR=%s' % spec['bzip2'].prefix, + # CMake's FindBlas/Lapack may pickup system's blas/lapack instead of Spack's. + # Be more specific to avoid this. + # Note that both lapack and blas are provided in -DLAPACK_XYZ variables + '-DLAPACK_FOUND=true', + '-DLAPACK_INCLUDE_DIRS=%s;%s' % + (spec['lapack'].prefix.include, + spec['blas'].prefix.include), + '-DLAPACK_LIBRARIES=%s;%s' % + (join_path(spec['lapack'].prefix.lib,'liblapack.%s' % dsuf), # FIXME don't hardcode names + join_path(spec['blas'].prefix.lib,'libblas.%s' % dsuf)), # FIXME don't hardcode names + '-DMUPARSER_DIR=%s ' % spec['muparser'].prefix, + '-DP4EST_DIR=%s' % spec['p4est'].prefix, + '-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix, + '-DTBB_DIR=%s' % spec['tbb'].prefix, + '-DZLIB_DIR=%s' % spec['zlib'].prefix + ]) + + # MPI + if '+mpi' in spec: + options.extend([ + '-DDEAL_II_WITH_MPI:BOOL=ON', + '-DCMAKE_C_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # FIXME: avoid hardcoding mpi wrappers names + '-DCMAKE_CXX_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'), + '-DCMAKE_Fortran_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'), + ]) + else: + options.extend([ + '-DDEAL_II_WITH_MPI:BOOL=OFF', + ]) + + # Optional dependencies for which librariy names are the same as CMake variables + for library in ('hdf5', 'p4est','petsc', 'slepc','trilinos','metis'): + if library in spec: + options.extend([ + '-D{library}_DIR={value}'.format(library=library.upper(), value=spec[library].prefix), + '-DDEAL_II_WITH_{library}:BOOL=ON'.format(library=library.upper()) + ]) + else: + options.extend([ + '-DDEAL_II_WITH_{library}:BOOL=OFF'.format(library=library.upper()) + ]) + + # doxygen + options.extend([ + '-DDEAL_II_COMPONENT_DOCUMENTATION=%s' % ('ON' if '+doc' in spec else 'OFF'), + ]) + + + # arpack + if '+arpack' in spec: + options.extend([ + '-DARPACK_DIR=%s' % spec['arpack-ng'].prefix, + '-DDEAL_II_WITH_ARPACK=ON', + '-DDEAL_II_ARPACK_WITH_PARPACK=ON' + ]) + else: + options.extend([ + '-DDEAL_II_WITH_ARPACK=OFF' + ]) + + # since Netcdf is spread among two, need to do it by hand: + if '+netcdf' in spec: + options.extend([ + '-DNETCDF_FOUND=true', + '-DNETCDF_LIBRARIES=%s;%s' % + (join_path(spec['netcdf-cxx'].prefix.lib,'libnetcdf_c++.%s' % dsuf), + join_path(spec['netcdf'].prefix.lib,'libnetcdf.%s' % dsuf)), + '-DNETCDF_INCLUDE_DIRS=%s;%s' % + (spec['netcdf-cxx'].prefix.include, + spec['netcdf'].prefix.include), + ]) + else: + options.extend([ + '-DDEAL_II_WITH_NETCDF=OFF' + ]) + + # Open Cascade + if '+oce' in spec: + options.extend([ + '-DOPENCASCADE_DIR=%s' % spec['oce'].prefix, + 
+                '-DDEAL_II_WITH_OPENCASCADE=ON'
+            ])
+        else:
+            options.extend([
+                '-DDEAL_II_WITH_OPENCASCADE=OFF'
+            ])
+
+        cmake('.', *options)
+
+        make()
+        make("test")
+        make("install")
+
+        # run some MPI examples with different solvers from PETSc and Trilinos
+        env['DEAL_II_DIR'] = prefix
+        print('=====================================')
+        print('============ EXAMPLES ===============')
+        print('=====================================')
+        # take bare-bones step-3
+        print('=====================================')
+        print('============ Step-3 =================')
+        print('=====================================')
+        with working_dir('examples/step-3'):
+            cmake('.')
+            make('release')
+            make('run',parallel=False)
+
+        # An example which uses Metis + PETSc
+        # FIXME: switch step-18 to MPI
+        with working_dir('examples/step-18'):
+            print('=====================================')
+            print('============= Step-18 ===============')
+            print('=====================================')
+            # list the number of cycles to speed up
+            filter_file(r'(end_time = 10;)', ('end_time = 3;'), 'step-18.cc')
+            if '^petsc' in spec and '^metis' in spec:
+                cmake('.')
+                make('release')
+                make('run',parallel=False)
+
+        # take step-40 which can use both PETSc and Trilinos
+        # FIXME: switch step-40 to MPI run
+        with working_dir('examples/step-40'):
+            print('=====================================')
+            print('========== Step-40 PETSc ============')
+            print('=====================================')
+            # list the number of cycles to speed up
+            filter_file(r'(const unsigned int n_cycles = 8;)', ('const unsigned int n_cycles = 2;'), 'step-40.cc')
+            cmake('.')
+            if '^petsc' in spec:
+                make('release')
+                make('run',parallel=False)
+
+            print('=====================================')
+            print('========= Step-40 Trilinos ==========')
+            print('=====================================')
+            # change Linear Algebra to Trilinos
+            filter_file(r'(\/\/ #define FORCE_USE_OF_TRILINOS.*)', ('#define FORCE_USE_OF_TRILINOS'), 'step-40.cc')
+            if '^trilinos+hypre' in spec:
+                make('release')
+                make('run',parallel=False)
+
+            print('=====================================')
+            print('=== Step-40 Trilinos SuperluDist ====')
+            print('=====================================')
+            # change to direct solvers
+            filter_file(r'(LA::SolverCG solver\(solver_control\);)', ('TrilinosWrappers::SolverDirect::AdditionalData data(false,"Amesos_Superludist"); TrilinosWrappers::SolverDirect solver(solver_control,data);'), 'step-40.cc')
+            filter_file(r'(LA::MPI::PreconditionAMG preconditioner;)', (''), 'step-40.cc')
+            filter_file(r'(LA::MPI::PreconditionAMG::AdditionalData data;)', (''), 'step-40.cc')
+            filter_file(r'(preconditioner.initialize\(system_matrix, data\);)', (''), 'step-40.cc')
+            filter_file(r'(solver\.solve \(system_matrix, completely_distributed_solution, system_rhs,)', ('solver.solve (system_matrix, completely_distributed_solution, system_rhs);'), 'step-40.cc')
+            filter_file(r'(preconditioner\);)', (''), 'step-40.cc')
+            if '^trilinos+superlu-dist' in spec:
+                make('release')
+                make('run',parallel=False)
+
+            print('=====================================')
+            print('====== Step-40 Trilinos MUMPS =======')
+            print('=====================================')
+            # switch to Mumps
+            filter_file(r'(Amesos_Superludist)', ('Amesos_Mumps'), 'step-40.cc')
+            if '^trilinos+mumps' in spec:
+                make('release')
+                make('run',parallel=False)
+
+        print('=====================================')
+        print('============ Step-36 ================')
+        print('=====================================')
+        with working_dir('examples/step-36'):
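+            # step-36 is an eigenvalue problem that needs SLEPc, so the
+            # example is only configured and run when SLEPc is in the spec.
+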
if 'slepc' in spec: + cmake('.') + make('release') + make('run',parallel=False) + + print('=====================================') + print('============ Step-54 ================') + print('=====================================') + with working_dir('examples/step-54'): + if 'oce' in spec: + cmake('.') + make('release') + make('run',parallel=False) diff --git a/var/spack/repos/builtin/packages/dia/package.py b/var/spack/repos/builtin/packages/dia/package.py new file mode 100644 index 0000000000000000000000000000000000000000..1cb5910e46db8d049bb357e2db7447ea80a50abb --- /dev/null +++ b/var/spack/repos/builtin/packages/dia/package.py @@ -0,0 +1,34 @@ +from spack import * + +class Dia(Package): + """Dia is a program for drawing structured diagrams.""" + homepage = 'https://wiki.gnome.org/Apps/Dia' + url = 'https://ftp.gnome.org/pub/gnome/sources/dia/0.97/dia-0.97.3.tar.xz' + + version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c') + + depends_on('gtkplus@2.6.0:') + depends_on('cairo') + #depends_on('libart') # optional dependency, not yet supported by spack. + depends_on('libpng') + depends_on('libxslt') + depends_on('python') + depends_on('swig') + # depends_on('py-gtk') # optional dependency, not yet supported by spack. + + def url_for_version(self, version): + """Handle Dia's version-based custom URLs.""" + return 'https://ftp.gnome.org/pub/gnome/source/dia/%s/dia-%s.tar.xz' % (version.up_to(2), version) + + def install(self, spec, prefix): + + # configure, build, install: + options = ['--prefix=%s' % prefix, + '--with-cairo', + '--with-xslt-prefix=%s' % spec['libxslt'].prefix, + '--with-python', + '--with-swig'] + + configure(*options) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py index 3d4a4e47a78de86ab3baa80a48984f0fa4b1b1b9..3a1deba9e143353ce36c059423ec48d37b277255 100644 --- a/var/spack/repos/builtin/packages/doxygen/package.py +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -4,6 +4,7 @@ #------------------------------------------------------------------------------ from spack import * +import sys class Doxygen(Package): """Doxygen is the de facto standard tool for generating documentation @@ -17,6 +18,10 @@ class Doxygen(Package): version('1.8.10', '79767ccd986f12a0f949015efb5f058f') depends_on("cmake@2.8.12:") + # flex does not build on OSX, but it's provided there anyway + depends_on("flex", sys.platform != 'darwin') + depends_on("bison", sys.platform != 'darwin') + def install(self, spec, prefix): cmake('.', *std_cmake_args) diff --git a/var/spack/repos/builtin/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py index 0111dcbe08dec710cecaae0c57100ab08b004002..b28e897a0f565817595bdc67a3ed6fa8356393ed 100644 --- a/var/spack/repos/builtin/packages/dyninst/package.py +++ b/var/spack/repos/builtin/packages/dyninst/package.py @@ -31,6 +31,8 @@ class Dyninst(Package): url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" list_url = "http://www.dyninst.org/downloads/dyninst-8.x" + version('9.1.0', '5c64b77521457199db44bec82e4988ac', + url="http://www.paradyn.org/release9.1.0/DyninstAPI-9.1.0.tgz") version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7', url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz") version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a', diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py index 
8d6e672f86cfaba8554aca334307ead0fec119be..1501989812d45ca64512cc1aac386b83bece6e6c 100644 --- a/var/spack/repos/builtin/packages/eigen/package.py +++ b/var/spack/repos/builtin/packages/eigen/package.py @@ -45,7 +45,7 @@ class Eigen(Package): # TODO : dependency on googlehash, superlu, adolc missing - depends_on('metis', when='+metis') + depends_on('metis@5:', when='+metis') depends_on('scotch', when='+scotch') depends_on('fftw', when='+fftw') depends_on('suite-sparse', when='+suitesparse') diff --git a/var/spack/repos/builtin/packages/elk/package.py b/var/spack/repos/builtin/packages/elk/package.py new file mode 100644 index 0000000000000000000000000000000000000000..1d9216fd1a9c182d7ee4864d34e5db3bc8c1d58f --- /dev/null +++ b/var/spack/repos/builtin/packages/elk/package.py @@ -0,0 +1,122 @@ +from spack import * +import spack + +class Elk(Package): + '''An all-electron full-potential linearised augmented-plane wave + (FP-LAPW) code with many advanced features.''' + + homepage = 'http://elk.sourceforge.net/' + url = 'https://sourceforge.net/projects/elk/files/elk-3.3.17.tgz' + + version('3.3.17', 'f57f6230d14f3b3b558e5c71f62f0592') + + # Elk provides these libraries, but allows you to specify your own + variant('blas', default=True, description='Build with custom BLAS library') + variant('lapack', default=True, description='Build with custom LAPACK library') + variant('fft', default=True, description='Build with custom FFT library') + + # Elk does not provide these libraries, but allows you to use them + variant('mpi', default=True, description='Enable MPI parallelism') + variant('openmp', default=True, description='Enable OpenMP support') + variant('libxc', default=True, description='Link to Libxc functional library') + + depends_on('blas', when='+blas') + depends_on('lapack', when='+lapack') + depends_on('fftw', when='+fft') + depends_on('mpi', when='+mpi') + depends_on('libxc', when='+libxc') + + # Cannot be built in parallel + parallel = False + + + def configure(self, spec): + # Dictionary of configuration options + config = { + 'MAKE': 'make', + 'F90': join_path(spack.build_env_path, 'f90'), + 'F77': join_path(spack.build_env_path, 'f77'), + 'AR': 'ar', + 'LIB_FFT': 'fftlib.a', + 'SRC_MPI': 'mpi_stub.f90', + 'SRC_OMP': 'omp_stub.f90', + 'SRC_libxc': 'libxcifc_stub.f90', + 'SRC_FFT': 'zfftifc.f90' + } + + # Compiler-specific flags + flags = '' + if self.compiler.name == 'intel': + flags = '-O3 -ip -unroll -no-prec-div -openmp' + elif self.compiler.name == 'gcc': + flags = '-O3 -ffast-math -funroll-loops -fopenmp' + elif self.compiler.name == 'pgi': + flags = '-O3 -mp -lpthread' + elif self.compiler.name == 'g95': + flags = '-O3 -fno-second-underscore' + elif self.compiler.name == 'nag': + flags = '-O4 -kind=byte -dusty -dcfuns' + elif self.compiler.name == 'xl': + flags = '-O3 -qsmp=omp' + config['F90_OPTS'] = flags + config['F77_OPTS'] = flags + + # BLAS/LAPACK support + blas = 'blas.a' + lapack = 'lapack.a' + if '+blas' in spec: + blas = join_path(spec['blas'].prefix.lib, 'libblas.so') + if '+lapack' in spec: + lapack = join_path(spec['lapack'].prefix.lib, 'liblapack.so') + config['LIB_LPK'] = ' '.join([lapack, blas]) # lapack must come before blas + + # FFT support + if '+fft' in spec: + config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib, 'libfftw3.so') + config['SRC_FFT'] = 'zfftifc_fftw.f90' + + # MPI support + if '+mpi' in spec: + config.pop('SRC_MPI') + config['F90'] = join_path(spec['mpi'].prefix.bin, 'mpif90') + config['F77'] = join_path(spec['mpi'].prefix.bin, 'mpif77') + + 
# OpenMP support + if '+openmp' in spec: + config.pop('SRC_OMP') + + # Libxc support + if '+libxc' in spec: + config['LIB_libxc'] = ' '.join([ + join_path(spec['libxc'].prefix.lib, 'libxcf90.so'), + join_path(spec['libxc'].prefix.lib, 'libxc.so') + ]) + config['SRC_libxc'] = ' '.join([ + 'libxc_funcs.f90', + 'libxc.f90', + 'libxcifc.f90' + ]) + + # Write configuration options to include file + with open('make.inc', 'w') as inc: + for key in config: + inc.write('{0} = {1}\n'.format(key, config[key])) + + + def install(self, spec, prefix): + # Elk only provides an interactive setup script + self.configure(spec) + + make() + make('test') + + # The Elk Makefile does not provide an install target + mkdirp(prefix.bin) + + install('src/elk', prefix.bin) + install('src/eos/eos', prefix.bin) + install('src/spacegroup/spacegroup', prefix.bin) + + install_tree('examples', join_path(prefix, 'examples')) + install_tree('species', join_path(prefix, 'species')) + diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py new file mode 100644 index 0000000000000000000000000000000000000000..45181da41bf8ab0f81b8e486f75e4f2ca1b21493 --- /dev/null +++ b/var/spack/repos/builtin/packages/environment-modules/package.py @@ -0,0 +1,38 @@ +from spack import * + + +class EnvironmentModules(Package): + """The Environment Modules package provides for the dynamic + modification of a user's environment via modulefiles.""" + + homepage = "https://sourceforge.net/p/modules/wiki/Home/" + url = "http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz" + + version('3.2.10', '8b097fdcb90c514d7540bb55a3cb90fb') + + # Dependencies: + depends_on('tcl') + + def install(self, spec, prefix): + tcl_spec = spec['tcl'] + + # See: https://sourceforge.net/p/modules/bugs/62/ + CPPFLAGS = ['-DUSE_INTERP_ERRORLINE'] + config_args = [ + "--without-tclx", + "--with-tclx-ver=0.0", + "--prefix=%s" % prefix, + "--with-tcl=%s" % join_path(tcl_spec.prefix, 'lib'), # It looks for tclConfig.sh + "--with-tcl-ver=%d.%d" % (tcl_spec.version.version[0], tcl_spec.version.version[1]), + '--disable-debug', + '--disable-dependency-tracking', + '--disable-silent-rules', + '--disable-versioning', + '--datarootdir=%s' % prefix.share, + 'CPPFLAGS=%s' % ' '.join(CPPFLAGS) + ] + + + configure(*config_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py index a2bf58f585b5ddf9f6ff2053488fef4f11a9f676..0dad57a9f61b8aac9d513b3a801d921262360aad 100644 --- a/var/spack/repos/builtin/packages/espresso/package.py +++ b/var/spack/repos/builtin/packages/espresso/package.py @@ -24,7 +24,7 @@ class Espresso(Package): depends_on('fftw~mpi', when='~mpi') depends_on('fftw+mpi', when='+mpi') depends_on('scalapack', when='+scalapack+mpi') # TODO : + mpi needed to avoid false dependencies installation - + def check_variants(self, spec): error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active' if '+scalapack' in spec and '~mpi' in spec: @@ -33,9 +33,10 @@ def check_variants(self, spec): raise RuntimeError(error.format(variant='elpa')) def install(self, spec, prefix): + from glob import glob self.check_variants(spec) - options = ['-prefix=%s' % prefix] + options = ['-prefix=%s' % prefix.bin] if '+mpi' in spec: options.append('--enable-parallel') @@ -61,5 +62,11 @@ def install(self, spec, prefix): configure(*options) make('all') - make('install') + + if 
spec.architecture.startswith('darwin'): + mkdirp(prefix.bin) + for filename in glob("bin/*.x"): + install(filename, prefix.bin) + else: + make('install') diff --git a/var/spack/repos/builtin/packages/gcc/darwin/gcc-4.9.patch1 b/var/spack/repos/builtin/packages/gcc/darwin/gcc-4.9.patch1 new file mode 100644 index 0000000000000000000000000000000000000000..444e292786df41346a3a1cc6267bba587408a007 --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/darwin/gcc-4.9.patch1 @@ -0,0 +1,42 @@ +diff --git a/gcc/configure b/gcc/configure +index 9523773..52b0bf7 100755 +--- a/gcc/configure ++++ b/gcc/configure +@@ -24884,7 +24884,7 @@ if test "${gcc_cv_as_ix86_filds+set}" = set; then : + else + gcc_cv_as_ix86_filds=no + if test x$gcc_cv_as != x; then +- $as_echo 'filds mem; fists mem' > conftest.s ++ $as_echo 'filds (%ebp); fists (%ebp)' > conftest.s + if { ac_try='$gcc_cv_as $gcc_cv_as_flags -o conftest.o conftest.s >&5' + { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 + (eval $ac_try) 2>&5 +@@ -24915,7 +24915,7 @@ if test "${gcc_cv_as_ix86_fildq+set}" = set; then : + else + gcc_cv_as_ix86_fildq=no + if test x$gcc_cv_as != x; then +- $as_echo 'fildq mem; fistpq mem' > conftest.s ++ $as_echo 'fildq (%ebp); fistpq (%ebp)' > conftest.s + if { ac_try='$gcc_cv_as $gcc_cv_as_flags -o conftest.o conftest.s >&5' + { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 + (eval $ac_try) 2>&5 +diff --git a/gcc/configure.ac b/gcc/configure.ac +index 68b0ee8..bd53978 100644 +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -3869,13 +3869,13 @@ foo: nop + + gcc_GAS_CHECK_FEATURE([filds and fists mnemonics], + gcc_cv_as_ix86_filds,,, +- [filds mem; fists mem],, ++ [filds (%ebp); fists (%ebp)],, + [AC_DEFINE(HAVE_AS_IX86_FILDS, 1, + [Define if your assembler uses filds and fists mnemonics.])]) + + gcc_GAS_CHECK_FEATURE([fildq and fistpq mnemonics], + gcc_cv_as_ix86_fildq,,, +- [fildq mem; fistpq mem],, ++ [fildq (%ebp); fistpq (%ebp)],, + [AC_DEFINE(HAVE_AS_IX86_FILDQ, 1, + [Define if your assembler uses fildq and fistq mnemonics.])]) + diff --git a/var/spack/repos/builtin/packages/gcc/darwin/gcc-4.9.patch2 b/var/spack/repos/builtin/packages/gcc/darwin/gcc-4.9.patch2 new file mode 100644 index 0000000000000000000000000000000000000000..b065997f453926e20d285f8a5e6555d9cd2e8f96 --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/darwin/gcc-4.9.patch2 @@ -0,0 +1,28 @@ +From 82f81877458ea372176eabb5de36329431dce99b Mon Sep 17 00:00:00 2001 +From: Iain Sandoe <iain@codesourcery.com> +Date: Sat, 21 Dec 2013 00:30:18 +0000 +Subject: [PATCH] don't try to mark local symbols as no-dead-strip + +--- + gcc/config/darwin.c | 5 +++++ + 1 file changed, 5 insertions(+) + +diff --git a/gcc/config/darwin.c b/gcc/config/darwin.c +index 40804b8..0080299 100644 +--- a/gcc/config/darwin.c ++++ b/gcc/config/darwin.c +@@ -1259,6 +1259,11 @@ darwin_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED) + void + darwin_mark_decl_preserved (const char *name) + { ++ /* Actually we shouldn't mark any local symbol this way, but for now ++ this only happens with ObjC meta-data. 
*/ ++ if (darwin_label_is_anonymous_local_objc_name (name)) ++ return; ++ + fprintf (asm_out_file, "\t.no_dead_strip "); + assemble_name (asm_out_file, name); + fputc ('\n', asm_out_file); +-- +2.2.1 + diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py index f8958ee290777d85f832a99340492c0305aa6a04..6043b6227932de76ac64fa3298dc87c9ad132312 100644 --- a/var/spack/repos/builtin/packages/gcc/package.py +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -26,6 +26,8 @@ from contextlib import closing from glob import glob +import sys +import os class Gcc(Package): """The GNU Compiler Collection includes front ends for C, C++, @@ -47,24 +49,33 @@ class Gcc(Package): version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') version('4.5.4', '27e459c2566b8209ab064570e1b378f7') - variant('gold', default=True, description="Build the gold linker plugin for ld-based LTO") + variant('binutils', default=sys.platform != 'darwin', + description="Build via binutils") + variant('gold', default=sys.platform != 'darwin', + description="Build the gold linker plugin for ld-based LTO") depends_on("mpfr") depends_on("gmp") depends_on("mpc", when='@4.5:') depends_on("isl", when='@5.0:') - depends_on("binutils~libiberty", when='~gold') - depends_on("binutils~libiberty+gold", when='+gold') + depends_on("binutils~libiberty", when='+binutils ~gold') + depends_on("binutils~libiberty+gold", when='+binutils +gold') + # TODO: integrate these libraries. #depends_on("ppl") #depends_on("cloog") + if sys.platform == 'darwin': + patch('darwin/gcc-4.9.patch1', when='@4.9.3') + patch('darwin/gcc-4.9.patch2', when='@4.9.3') def install(self, spec, prefix): # libjava/configure needs a minor fix to install into spack paths. - filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True) + filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', + string=True) enabled_languages = set(('c', 'c++', 'fortran', 'java', 'objc')) - if spec.satisfies("@4.7.1:"): + + if spec.satisfies("@4.7.1:") and sys.platform != 'darwin': enabled_languages.add('go') # Generic options to compile GCC @@ -72,32 +83,40 @@ def install(self, spec, prefix): "--libdir=%s/lib64" % prefix, "--disable-multilib", "--enable-languages=" + ','.join(enabled_languages), - "--with-mpc=%s" % spec['mpc'].prefix, - "--with-mpfr=%s" % spec['mpfr'].prefix, - "--with-gmp=%s" % spec['gmp'].prefix, + "--with-mpc=%s" % spec['mpc'].prefix, + "--with-mpfr=%s" % spec['mpfr'].prefix, + "--with-gmp=%s" % spec['gmp'].prefix, "--enable-lto", - "--with-gnu-ld", - "--with-gnu-as", "--with-quad"] # Binutils - static_bootstrap_flags = "-static-libstdc++ -static-libgcc" - binutils_options = ["--with-sysroot=/", - "--with-stage1-ldflags=%s %s" % (self.rpath_args, static_bootstrap_flags), - "--with-boot-ldflags=%s %s" % (self.rpath_args, static_bootstrap_flags), - "--with-ld=%s/bin/ld" % spec['binutils'].prefix, - "--with-as=%s/bin/as" % spec['binutils'].prefix] - options.extend(binutils_options) + if spec.satisfies('+binutils'): + static_bootstrap_flags = "-static-libstdc++ -static-libgcc" + binutils_options = ["--with-sysroot=/", + "--with-stage1-ldflags=%s %s" % + (self.rpath_args, static_bootstrap_flags), + "--with-boot-ldflags=%s %s" % + (self.rpath_args, static_bootstrap_flags), + "--with-gnu-ld", + "--with-ld=%s/bin/ld" % spec['binutils'].prefix, + "--with-gnu-as", + "--with-as=%s/bin/as" % spec['binutils'].prefix] + options.extend(binutils_options) # Isl if 'isl' in spec: isl_options = ["--with-isl=%s" % 
spec['isl'].prefix] options.extend(isl_options) + if sys.platform == 'darwin' : + darwin_options = [ "--with-build-config=bootstrap-debug" ] + options.extend(darwin_options) + build_dir = join_path(self.stage.path, 'spack-build') configure = Executable( join_path(self.stage.source_path, 'configure') ) with working_dir(build_dir, create=True): # Rest of install is straightforward. configure(*options) - make() + if sys.platform == 'darwin' : make("bootstrap") + else: make() make("install") self.write_rpath_specs() @@ -114,7 +133,8 @@ def write_rpath_specs(self): """Generate a spec file so the linker adds a rpath to the libs the compiler used to build the executable.""" if not self.spec_dir: - tty.warn("Could not install specs for %s." % self.spec.format('$_$@')) + tty.warn("Could not install specs for %s." % + self.spec.format('$_$@')) return gcc = Executable(join_path(self.prefix.bin, 'gcc')) @@ -124,5 +144,6 @@ def write_rpath_specs(self): for line in lines: out.write(line + "\n") if line.startswith("*link:"): - out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix)) + out.write("-rpath %s/lib:%s/lib64 \\\n" % + (self.prefix, self.prefix)) set_install_permissions(specs_file) diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py new file mode 100644 index 0000000000000000000000000000000000000000..4f1f1ec2dd1eaa348ee985809cb4e85c26913793 --- /dev/null +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -0,0 +1,69 @@ +from spack import * + +class Gdal(Package): + """ + GDAL is a translator library for raster and vector geospatial + data formats that is released under an X/MIT style Open Source + license by the Open Source Geospatial Foundation. As a library, + it presents a single raster abstract data model and vector + abstract data model to the calling application for all supported + formats. 
It also comes with a variety of useful command line + utilities for data translation and processing + """ + + homepage = "http://www.gdal.org/" + url = "http://download.osgeo.org/gdal/2.0.2/gdal-2.0.2.tar.gz" + list_url = "http://download.osgeo.org/gdal/" + list_depth = 2 + + version('2.0.2', '573865f3f59ba7b4f8f4cddf223b52a5') + + extends('python') + + variant('hdf5', default=False, description='Enable HDF5 support') + variant('hdf', default=False, description='Enable HDF4 support') + variant('openjpeg', default=False, description='Enable JPEG2000 support') + variant('geos', default=False, description='Enable GEOS support') + variant('kea', default=False, description='Enable KEA support') + variant('netcdf', default=False, description='Enable netcdf support') + + depends_on('swig') + depends_on("hdf5", when='+hdf5') + depends_on("hdf", when='+hdf') + depends_on("openjpeg", when='+openjpeg') + depends_on("geos", when='+geos') + depends_on("kealib", when='+kea') + depends_on("netcdf", when='+netcdf') + depends_on("libtiff") + depends_on("libpng") + depends_on("zlib") + depends_on("proj") + depends_on("py-numpy") + + parallel = False + + def install(self, spec, prefix): + args = [] + args.append("--prefix=%s" % prefix) + args.append("--with-liblzma=yes") + args.append("--with-zlib=%s" % spec['zlib'].prefix) + args.append("--with-python=%s" % spec['python'].prefix.bin + "/python") + args.append("--without-libtool") + + if '+geos' in spec: + args.append('--with-geos=yes') + if '+hdf' in spec: + args.append('--with-hdf4=%s' % spec['hdf'].prefix) + if '+hdf5' in spec: + args.append('--with-hdf5=%s' % spec['hdf5'].prefix) + if '+openjpeg' in spec: + args.append('--with-openjpeg=%s' % spec['openjpeg'].prefix) + if '+kea' in spec: + args.append('--with-kea=yes') + if '+netcdf' in spec: + args.append('--with-netcdf=%s' % spec['netcdf'].prefix) + + configure(*args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py index 4a2657e32f21b5e60048df62f9b9eee94193a536..030703f286e14463e81009d31aa95c9a58f65262 100644 --- a/var/spack/repos/builtin/packages/geos/package.py +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -1,4 +1,5 @@ from spack import * +import os class Geos(Package): """GEOS (Geometry Engine - Open Source) is a C++ port of the Java @@ -10,6 +11,10 @@ class Geos(Package): homepage = "http://trac.osgeo.org/geos/" url = "http://download.osgeo.org/geos/geos-3.4.2.tar.bz2" + # Verison 3.5.0 supports Autotools and CMake + version('3.5.0', '136842690be7f504fba46b3c539438dd') + + # Versions through 3.4.2 have CMake, but only Autotools is supported version('3.4.2', 'fc5df2d926eb7e67f988a43a92683bae') version('3.4.1', '4c930dec44c45c49cd71f3e0931ded7e') version('3.4.0', 'e41318fc76b5dc764a69d43ac6b18488') @@ -21,11 +26,22 @@ class Geos(Package): version('3.3.4', '1bb9f14d57ef06ffa41cb1d67acb55a1') version('3.3.3', '8454e653d7ecca475153cc88fd1daa26') - extends('python') - depends_on('swig') +# # Python3 is not supported. 
+# variant('python', default=False, description='Enable Python support') + +# extends('python', when='+python') +# depends_on('python', when='+python') +# depends_on('swig', when='+python') def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--enable-python") + args = ["--prefix=%s" % prefix] +# if '+python' in spec: +# os.environ['PYTHON'] = join_path(spec['python'].prefix, 'bin', +# 'python' if spec['python'].version[:1][0] <= 2 else 'python3') +# os.environ['SWIG'] = join_path(spec['swig'].prefix, 'bin', 'swig') +# +# args.append("--enable-python") + + configure(*args) make() make("install") diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py new file mode 100644 index 0000000000000000000000000000000000000000..05712d7392cdb94b56e80b685ec5e14e12398b10 --- /dev/null +++ b/var/spack/repos/builtin/packages/gettext/package.py @@ -0,0 +1,30 @@ +from spack import * + +class Gettext(Package): + """GNU internationalization (i18n) and localization (l10n) library.""" + homepage = "https://www.gnu.org/software/gettext/" + url = "http://ftpmirror.gnu.org/gettext/gettext-0.19.7.tar.xz" + + version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5') + + def install(self, spec, prefix): + options = ['--disable-dependency-tracking', + '--disable-silent-rules', + '--disable-debug', + '--prefix=%s' % prefix, + '--with-included-gettext', + '--with-included-glib', + '--with-included-libcroco', + '--with-included-libunistring', + '--with-emacs', + '--with-lispdir=%s/emacs/site-lisp/gettext' % prefix.share, + '--disable-java', + '--disable-csharp', + '--without-git', # Don't use VCS systems to create these archives + '--without-cvs', + '--without-xz'] + + configure(*options) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index 586b6ce3c39c1a68a838f5209a12839f10e2cc7d..388f84aefdb017e1218977f84fa930ef918bdc6f 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -36,6 +36,8 @@ class Git(Package): depends_on("curl", when="+curl") depends_on("expat", when="+expat") + # Also depends_on gettext: apt-get install gettext (Ubuntu) + # Use system perl for now. # depends_on("perl") # depends_on("pcre") diff --git a/var/spack/repos/builtin/packages/global/package.py b/var/spack/repos/builtin/packages/global/package.py index e8f06516d9a9add46f9c34f5539e4e9c75f542a6..aac1cede30f39235a08501c8577a48949baea26f 100644 --- a/var/spack/repos/builtin/packages/global/package.py +++ b/var/spack/repos/builtin/packages/global/package.py @@ -11,6 +11,7 @@ class Global(Package): version('6.5', 'dfec818b4f53d91721e247cf7b218078') depends_on('exuberant-ctags') + depends_on('ncurses') def install(self, spec, prefix): config_args = ['--prefix={0}'.format(prefix)] diff --git a/var/spack/repos/builtin/packages/googletest/package.py b/var/spack/repos/builtin/packages/googletest/package.py new file mode 100644 index 0000000000000000000000000000000000000000..663b758747becb83c4b4b058a6c84fd08cd5f1db --- /dev/null +++ b/var/spack/repos/builtin/packages/googletest/package.py @@ -0,0 +1,24 @@ +from spack import * + +class Googletest(Package): + """Google test framework for C++. 
Also called gtest."""
+    homepage = "https://github.com/google/googletest"
+    url = "https://github.com/google/googletest/tarball/release-1.7.0"
+
+    version('1.7.0', '5eaf03ed925a47b37c8e1d559eb19bc4')
+
+    depends_on("cmake")
+
+    def install(self, spec, prefix):
+        which('cmake')('.', *std_cmake_args)
+
+        make()
+
+        # Google Test doesn't have a make install
+        # We have to do our own install here.
+        install_tree('include', prefix.include)
+
+        mkdirp(prefix.lib)
+        install('./libgtest.a', '%s' % prefix.lib)
+        install('./libgtest_main.a', '%s' % prefix.lib)
+
diff --git a/var/spack/repos/builtin/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py
index 7af7da1881afad7301a70685e519bee419c7c3c6..ecf92620d4a82d333989a26e528137d47ec5c50c 100644
--- a/var/spack/repos/builtin/packages/graphviz/package.py
+++ b/var/spack/repos/builtin/packages/graphviz/package.py
@@ -7,6 +7,12 @@ class Graphviz(Package):
     version('2.38.0', '5b6a829b2ac94efcd5fa3c223ed6d3ae')
 
+    # By default disable optional Perl language support to prevent build issues
+    # related to missing Perl packages. If Spack begins to support Perl in the
+    # future, this package can be updated to depends_on('perl') and the
+    # necessary devel packages.
+    variant('perl', default=False, description='Enable if you need the optional Perl language bindings.')
+
     parallel = False
 
     depends_on("swig")
@@ -14,8 +20,10 @@
     depends_on("ghostscript")
 
     def install(self, spec, prefix):
-        configure("--prefix=%s" %prefix)
+        options = ['--prefix=%s' % prefix]
+        if not '+perl' in spec:
+            options.append('--disable-perl')
+        configure(*options)
 
         make()
         make("install")
-
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
index 513a38ee8a49f931cd282a76109a1efd828b069f..cce609eb291b0a9b05f7c42d660ae3d76fb21161 100644
--- a/var/spack/repos/builtin/packages/hdf5/package.py
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -37,6 +37,7 @@ class Hdf5(Package):
     list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
     list_depth = 3
 
+    version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199')
     version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618')
     version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
     version('1.8.13', 'c03426e9e77d7766944654280b467289')
@@ -80,10 +81,16 @@ def install(self, spec, prefix):
         # sanity check in configure, so this doesn't merit a variant.
extra_args.append("--enable-unsupported") - if '+debug' in spec: - extra_args.append('--enable-debug=all') + if spec.satisfies('@1.10:'): + if '+debug' in spec: + extra_args.append('--enable-build-mode=debug') + else: + extra_args.append('--enable-build-mode=production') else: - extra_args.append('--enable-production') + if '+debug' in spec: + extra_args.append('--enable-debug=all') + else: + extra_args.append('--enable-production') if '+shared' in spec: extra_args.append('--enable-shared') @@ -94,10 +101,10 @@ def install(self, spec, prefix): extra_args.append('--enable-cxx') if '+fortran' in spec: - extra_args.extend([ - '--enable-fortran', - '--enable-fortran2003' - ]) + extra_args.append('--enable-fortran') + # '--enable-fortran2003' no longer exists as of version 1.10.0 + if spec.satisfies('@:1.8.16'): + extra_args.append('--enable-fortran2003') if '+mpi' in spec: # The HDF5 configure script warns if cxx and mpi are enabled @@ -139,5 +146,7 @@ def url_for_version(self, version): return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz" elif version < Version("1.7"): return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz" - else: + elif version < Version("1.10"): return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz" + else: + return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz" diff --git a/var/spack/repos/builtin/packages/hoomd-blue/package.py b/var/spack/repos/builtin/packages/hoomd-blue/package.py new file mode 100644 index 0000000000000000000000000000000000000000..d310b7687a5a73976cc4f66f702b3002be78ac7d --- /dev/null +++ b/var/spack/repos/builtin/packages/hoomd-blue/package.py @@ -0,0 +1,73 @@ +from spack import * +import os + +class HoomdBlue(Package): + """HOOMD-blue is a general-purpose particle simulation toolkit. It scales + from a single CPU core to thousands of GPUs. + + You define particle initial conditions and interactions in a high-level + python script. Then tell HOOMD-blue how you want to execute the job and it + takes care of the rest. 
Python job scripts give you unlimited flexibility + to create custom initialization routines, control simulation parameters, + and perform in situ analysis.""" + + homepage = "https://codeblue.umich.edu/hoomd-blue/index.html" + url = "https://bitbucket.org/glotzer/hoomd-blue/get/v1.3.3.tar.bz2" + + version('1.3.3', '1469ef4531dc14b579c0acddbfe6a273') + + variant('mpi', default=True, description='Compile with MPI enabled') + variant('cuda', default=True, description='Compile with CUDA Toolkit') + variant('doc', default=True, description='Generate documentation') + + extends('python') + depends_on('py-numpy') + depends_on('boost+python') + depends_on('cmake') + depends_on('mpi', when='+mpi') + depends_on('cuda', when='+cuda') + depends_on('doxygen', when='+doc') + + def install(self, spec, prefix): + + cmake_args = [ + '-DPYTHON_EXECUTABLE=%s/python' % spec['python'].prefix.bin, + '-DBOOST_ROOT=%s' % spec['boost' ].prefix + ] + + # MPI support + if '+mpi' in spec: + os.environ['MPI_HOME'] = spec['mpi'].prefix + cmake_args.append('-DENABLE_MPI=ON') + else: + cmake_args.append('-DENABLE_MPI=OFF') + + # CUDA support + if '+cuda' in spec: + cmake_args.append('-DENABLE_CUDA=ON') + else: + cmake_args.append('-DENABLE_CUDA=OFF') + + # CUDA-aware MPI library support + #if '+cuda' in spec and '+mpi' in spec: + # cmake_args.append('-DENABLE_MPI_CUDA=ON') + #else: + # cmake_args.append('-DENABLE_MPI_CUDA=OFF') + + # There may be a bug in the MPI-CUDA code. See: + # https://groups.google.com/forum/#!msg/hoomd-users/2griTESmc5I/E69s_M5fDwAJ + # This prevented "make test" from passing for me. + cmake_args.append('-DENABLE_MPI_CUDA=OFF') + + # Documentation + if '+doc' in spec: + cmake_args.append('-DENABLE_DOXYGEN=ON') + else: + cmake_args.append('-DENABLE_DOXYGEN=OFF') + + cmake_args.extend(std_cmake_args) + cmake('.', *cmake_args) + + make() + make("test") + make("install") diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index 242ee100d7a071ed7825973310754ce5d2185c21..4b915daa6877fc3cbc6b84ab9d300fb1b95321a3 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -1,5 +1,5 @@ from spack import * -import os +import os, sys class Hypre(Package): """Hypre is a library of high performance preconditioners that @@ -12,7 +12,10 @@ class Hypre(Package): version('2.10.1', 'dc048c4cabb3cd549af72591474ad674') version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') - variant('shared', default=True, description="Build shared library version (disables static library)") + # hypre does not know how to build shared libraries on Darwin + variant('shared', default=sys.platform!='darwin', description="Build shared library version (disables static library)") + # SuperluDist have conflicting headers with those in Hypre + variant('internal-superlu', default=True, description="Use internal Superlu routines") depends_on("mpi") depends_on("blas") @@ -37,6 +40,12 @@ def install(self, spec, prefix): if '+shared' in self.spec: configure_args.append("--enable-shared") + if '~internal-superlu' in self.spec: + configure_args.append("--without-superlu") + # MLI and FEI do not build without superlu on Linux + configure_args.append("--without-mli") + configure_args.append("--without-fei") + # Hypre's source is staged under ./src so we'll have to manually # cd into it. 
with working_dir("src"): diff --git a/var/spack/repos/builtin/packages/ipopt/package.py b/var/spack/repos/builtin/packages/ipopt/package.py new file mode 100644 index 0000000000000000000000000000000000000000..13c37bf79c4d277f9e6bbaab42b07673268e5233 --- /dev/null +++ b/var/spack/repos/builtin/packages/ipopt/package.py @@ -0,0 +1,51 @@ +from spack import * + +class Ipopt(Package): + """Ipopt (Interior Point OPTimizer, pronounced eye-pea-Opt) is a + software package for large-scale nonlinear optimization.""" + homepage = "https://projects.coin-or.org/Ipopt" + url = "http://www.coin-or.org/download/source/Ipopt/Ipopt-3.12.4.tgz" + + version('3.12.4', '12a8ecaff8dd90025ddea6c65b49cb03') + version('3.12.3', 'c560cbfa9cbf62acf8b485823c255a1b') + version('3.12.2', 'ec1e855257d7de09e122c446506fb00d') + version('3.12.1', 'ceaf895ce80c77778f2cab68ba9f17f3') + version('3.12.0', 'f7dfc3aa106a6711a85214de7595e827') + + depends_on("blas") + depends_on("lapack") + depends_on("pkg-config") + depends_on("mumps+double~mpi") + + def install(self, spec, prefix): + # Dependency directories + blas_dir = spec['blas'].prefix + lapack_dir = spec['lapack'].prefix + mumps_dir = spec['mumps'].prefix + + # Add directory with fake MPI headers in sequential MUMPS + # install to header search path + mumps_flags = "-ldmumps -lmumps_common -lpord -lmpiseq" + mumps_libcmd = "-L%s " % mumps_dir.lib + mumps_flags + + # By convention, spack links blas & lapack libs to libblas & liblapack + blas_lib = "-L%s" % blas_dir.lib + " -lblas" + lapack_lib = "-L%s" % lapack_dir.lib + " -llapack" + + configure_args = [ + "--prefix=%s" % prefix, + "--with-mumps-incdir=%s" % mumps_dir.include, + "--with-mumps-lib=%s" % mumps_libcmd, + "--enable-shared", + "--with-blas-incdir=%s" % blas_dir.include, + "--with-blas-lib=%s" % blas_lib, + "--with-lapack-incdir=%s" % lapack_dir.include, + "--with-lapack-lib=%s" % lapack_lib + ] + + configure(*configure_args) + + # IPOPT does not build correctly in parallel on OS X + make(parallel=False) + make("test", parallel=False) + make("install", parallel=False) diff --git a/var/spack/repos/builtin/packages/julia/openblas.patch b/var/spack/repos/builtin/packages/julia/openblas.patch new file mode 100644 index 0000000000000000000000000000000000000000..f75d7dd04f895cc722a969fadd5cd39a55fd6227 --- /dev/null +++ b/var/spack/repos/builtin/packages/julia/openblas.patch @@ -0,0 +1,68 @@ +diff --git a/deps/Makefile b/deps/Makefile +index 6cb73be..bcd8520 100644 +--- a/deps/Makefile ++++ b/deps/Makefile +@@ -1049,7 +1049,7 @@ OPENBLAS_BUILD_OPTS += NO_AFFINITY=1 + + # Build for all architectures - required for distribution + ifeq ($(OPENBLAS_DYNAMIC_ARCH), 1) +-OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1 ++OPENBLAS_BUILD_OPTS += DYNAMIC_ARCH=1 MAKE_NO_J=1 + endif + + # 64-bit BLAS interface +@@ -1085,6 +1085,7 @@ OPENBLAS_BUILD_OPTS += NO_AVX2=1 + endif + + $(OPENBLAS_SRC_DIR)/config.status: $(OPENBLAS_SRC_DIR)/Makefile ++ cd $(dir $@) && patch -p1 < ../openblas-make.patch + ifeq ($(OS),WINNT) + cd $(dir $@) && patch -p1 < ../openblas-win64.patch + endif +diff --git a/deps/openblas.version b/deps/openblas.version +index 7c97e1b..58b9467 100644 +--- a/deps/openblas.version ++++ b/deps/openblas.version +@@ -1,2 +1,2 @@ +-OPENBLAS_BRANCH=v0.2.15 +-OPENBLAS_SHA1=53e849f4fcae4363a64576de00e982722c7304f9 ++OPENBLAS_BRANCH=v0.2.17 ++OPENBLAS_SHA1=a71e8c82f6a9f73093b631e5deab1e8da716b61f +--- a/deps/openblas-make.patch ++++ b/deps/openblas-make.patch +@@ -0,0 +1,35 @@ ++diff --git a/Makefile.system b/Makefile.system ++index 
b89f60e..2dbdad0 100644 ++--- a/Makefile.system +++++ b/Makefile.system ++@@ -139,6 +139,10 @@ NO_PARALLEL_MAKE=0 ++ endif ++ GETARCH_FLAGS += -DNO_PARALLEL_MAKE=$(NO_PARALLEL_MAKE) ++ +++ifdef MAKE_NO_J +++GETARCH_FLAGS += -DMAKE_NO_J=$(MAKE_NO_J) +++endif +++ ++ ifdef MAKE_NB_JOBS ++ GETARCH_FLAGS += -DMAKE_NB_JOBS=$(MAKE_NB_JOBS) ++ endif ++diff --git a/getarch.c b/getarch.c ++index f9c49e6..dffad70 100644 ++--- a/getarch.c +++++ b/getarch.c ++@@ -1012,6 +1012,7 @@ int main(int argc, char *argv[]){ ++ #endif ++ #endif ++ +++#ifndef MAKE_NO_J ++ #ifdef MAKE_NB_JOBS ++ printf("MAKE += -j %d\n", MAKE_NB_JOBS); ++ #elif NO_PARALLEL_MAKE==1 ++@@ -1021,6 +1022,7 @@ int main(int argc, char *argv[]){ ++ printf("MAKE += -j %d\n", get_num_cores()); ++ #endif ++ #endif +++#endif ++ ++ break; ++ diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py index 6900af38e4f13941f4fc0d10c51b0766f7632526..25d782266b3502d5face812b265bdacc8e383396 100644 --- a/var/spack/repos/builtin/packages/julia/package.py +++ b/var/spack/repos/builtin/packages/julia/package.py @@ -4,43 +4,56 @@ class Julia(Package): """The Julia Language: A fresh approach to technical computing""" homepage = "http://julialang.org" - url = "http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz" + url = "https://github.com/JuliaLang/julia/releases/download/v0.4.3/julia-0.4.3-full.tar.gz" - version('0.4.3', '7b9f096798fca4bef262a64674bc2b52') - version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06') + version('master', + git='https://github.com/JuliaLang/julia.git', branch='master') + version('0.4.5', '69141ff5aa6cee7c0ec8c85a34aa49a6') + version('0.4.3', '8a4a59fd335b05090dd1ebefbbe5aaac') patch('gc.patch') + patch('openblas.patch', when='@0.4:0.4.5') - # Build-time dependencies - depends_on("cmake @2.8:") + # Build-time dependencies: # depends_on("awk") # depends_on("m4") # depends_on("pkg-config") - depends_on("python @2.6:2.9") - # I think that Julia requires the dependencies above, but it builds find (on - # my system) without these. We should enable them as necessary. + # Combined build-time and run-time dependencies: + depends_on("binutils") + depends_on("cmake @2.8:") + depends_on("git") + depends_on("openssl") + depends_on("python @2.7:2.999") + + # I think that Julia requires the dependencies above, but it + # builds fine (on my system) without these. We should enable them + # as necessary. - # Run-time dependencies + # Run-time dependencies: # depends_on("arpack") # depends_on("fftw +float") # depends_on("gmp") + # depends_on("libgit") # depends_on("mpfr") + # depends_on("openblas") # depends_on("pcre2") - # ARPACK: Requires BLAS and LAPACK; needs to use the same version as Julia. + # ARPACK: Requires BLAS and LAPACK; needs to use the same version + # as Julia. - # BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit systems. OpenBLAS - # has an option for this; make it available as variant. + # BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit + # systems. OpenBLAS has an option for this; make it available as + # variant. - # FFTW: Something doesn't work when using a pre-installed FFTW library; need - # to investigate. + # FFTW: Something doesn't work when using a pre-installed FFTW + # library; need to investigate. - # GMP, MPFR: Something doesn't work when using a pre-installed FFTW library; - # need to investigate. + # GMP, MPFR: Something doesn't work when using a pre-installed + # FFTW library; need to investigate. 
- # LLVM: Julia works only with specific versions, and might require patches. - # Thus we let Julia install its own LLVM. + # LLVM: Julia works only with specific versions, and might require + # patches. Thus we let Julia install its own LLVM. # Other possible dependencies: # USE_SYSTEM_OPENLIBM=0 @@ -50,11 +63,21 @@ class Julia(Package): # USE_SYSTEM_UTF8PROC=0 # USE_SYSTEM_LIBGIT2=0 + # Run-time dependencies for Julia packages: + depends_on("hdf5") + depends_on("mpi") + def install(self, spec, prefix): - # Explicitly setting CC, CXX, or FC breaks building libuv, one of - # Julia's dependencies. This might be a Darwin-specific problem. Given - # how Spack sets up compilers, Julia should still use Spack's compilers, - # even if we don't specify them explicitly. + if '@master' in spec: + # Julia needs to know the offset from a specific commit + git = which('git') + git('fetch', '--unshallow') + + # Explicitly setting CC, CXX, or FC breaks building libuv, one + # of Julia's dependencies. This might be a Darwin-specific + # problem. Given how Spack sets up compilers, Julia should + # still use Spack's compilers, even if we don't specify them + # explicitly. options = [#"CC=cc", #"CXX=c++", #"FC=fc", diff --git a/var/spack/repos/builtin/packages/kealib/package.py b/var/spack/repos/builtin/packages/kealib/package.py new file mode 100644 index 0000000000000000000000000000000000000000..475d21e1d8059e0db02b67146dcf48004734f6c9 --- /dev/null +++ b/var/spack/repos/builtin/packages/kealib/package.py @@ -0,0 +1,35 @@ +from spack import * + +class Kealib(Package): + """An HDF5 Based Raster File Format + + KEALib provides an implementation of the GDAL data model. + The format supports raster attribute tables, image pyramids, + meta-data and in-built statistics while also handling very + large files and compression throughout. + + Based on the HDF5 standard, it also provides a base from which + other formats can be derived and is a good choice for long + term data archiving. An independent software library (libkea) + provides complete access to the KEA image format and a GDAL + driver allowing KEA images to be used from any GDAL supported software. + + Development work on this project has been funded by Landcare Research. + """ + homepage = "http://kealib.org/" + url = "https://bitbucket.org/chchrsc/kealib/get/kealib-1.4.5.tar.gz" + + version('1.4.5', '112e9c42d980b2d2987a3c15d0833a5d') + + depends_on("hdf5") + + def install(self, spec, prefix): + with working_dir('trunk', create=False): + cmake_args = [] + cmake_args.append("-DCMAKE_INSTALL_PREFIX=%s" % prefix) + cmake_args.append("-DHDF5_INCLUDE_DIR=%s" % spec['hdf5'].prefix.include) + cmake_args.append("-DHDF5_LIB_PATH=%s" % spec['hdf5'].prefix.lib) + cmake('.', *cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py index 00736b7811af926e615d3aa5a0f9fc880a0916f9..d5d779f796750092b9967f27b3a0df84c9d4100b 100644 --- a/var/spack/repos/builtin/packages/libdrm/package.py +++ b/var/spack/repos/builtin/packages/libdrm/package.py @@ -2,7 +2,7 @@ class Libdrm(Package): """A userspace library for accessing the DRM, direct - rendering manager, on Linux, BSD and other operating + rendering manager, on Linux, BSD and other operating systems that support the ioctl interface.""" homepage = "http://dri.freedesktop.org/libdrm/" # no real website... 
@@ -11,6 +11,8 @@ class Libdrm(Package): version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') version('2.4.33', '86e4e3debe7087d5404461e0032231c8') + depends_on('libpciaccess') + def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py index 9f16708af5b913ed209b8a23e582dd1db068f4ad..29bc21b65c49ec3543a4bf1e68d93d764ed97ec4 100644 --- a/var/spack/repos/builtin/packages/libelf/package.py +++ b/var/spack/repos/builtin/packages/libelf/package.py @@ -38,8 +38,6 @@ class Libelf(Package): provides('elf') - sanity_check_is_file = 'include/libelf.h' - def install(self, spec, prefix): configure("--prefix=" + prefix, "--enable-shared", diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py index e02b08663e099e092e6f108227164e5e9648f73f..9d5782896e9a6c8ea6390dcd7841020e08a1ffed 100644 --- a/var/spack/repos/builtin/packages/libpng/package.py +++ b/var/spack/repos/builtin/packages/libpng/package.py @@ -8,6 +8,11 @@ class Libpng(Package): version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') version('1.6.15', '829a256f3de9307731d4f52dc071916d') version('1.6.14', '2101b3de1d5f348925990f9aa8405660') + version('1.5.26', '3ca98347a5541a2dad55cd6d07ee60a9') + version('1.4.19', '89bcbc4fc8b31f4a403906cf4f662330') + version('1.2.56', '9508fc59d10a1ffadd9aae35116c19ee') + + depends_on('zlib') def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py new file mode 100644 index 0000000000000000000000000000000000000000..010a5918c56e6664a4f9942ee0915bd5d7341641 --- /dev/null +++ b/var/spack/repos/builtin/packages/libxc/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Libxc(Package): + """Libxc is a library of exchange-correlation functionals for + density-functional theory.""" + + homepage = "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc" + url = "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz" + + version('2.2.2', 'd9f90a0d6e36df6c1312b6422280f2ec') + + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--enable-shared') + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py index 1dd5954c9978bb2ad4bad9f53aba8b2ff39c5aaf..d7d94c4546e33a3dd6f017b2c45b1ae8c6f8822f 100644 --- a/var/spack/repos/builtin/packages/libxcb/package.py +++ b/var/spack/repos/builtin/packages/libxcb/package.py @@ -14,6 +14,9 @@ class Libxcb(Package): depends_on("python") depends_on("xcb-proto") + # depends_on('pthread') # Ubuntu: apt-get install libpthread-stubs0-dev + # depends_on('xau') # Ubuntu: apt-get install libxau-dev + def patch(self): filter_file('typedef struct xcb_auth_info_t {', 'typedef struct {', 'src/xcb.h') diff --git a/var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch b/var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch new file mode 100644 index 0000000000000000000000000000000000000000..b182b167b90ab65b2b397327786409e355b8c0c0 --- /dev/null +++ b/var/spack/repos/builtin/packages/metis/install_gklib_defs_rename.patch @@ -0,0 +1,22 @@ +# HG changeset patch +# User Sean Farley <sean@mcs.anl.gov> +# Date 1332269671 18000 +# Tue Mar 20 13:54:31 2012 -0500 +# Node ID b95c0c2e1d8bf8e3273f7d45e856f0c0127d998e +# Parent 
88049269953c67c3fdcc4309bf901508a875f0dc +cmake: add gklib headers to install into include + +diff -r 88049269953c -r b95c0c2e1d8b libmetis/CMakeLists.txt +Index: libmetis/CMakeLists.txt +=================================================================== +--- a/libmetis/CMakeLists.txt Tue Mar 20 13:54:29 2012 -0500 ++++ b/libmetis/CMakeLists.txt Tue Mar 20 13:54:31 2012 -0500 +@@ -12,6 +12,8 @@ endif() + if(METIS_INSTALL) + install(TARGETS metis + LIBRARY DESTINATION lib + RUNTIME DESTINATION lib + ARCHIVE DESTINATION lib) ++ install(FILES gklib_defs.h DESTINATION include) ++ install(FILES gklib_rename.h DESTINATION include) + endif() diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py index bbfc4de7d1d129b66290f1f8a833aa422cff1e1e..41e3ebb429f0b5fdeee5cdd2e2e8288b9bdb240c 100644 --- a/var/spack/repos/builtin/packages/metis/package.py +++ b/var/spack/repos/builtin/packages/metis/package.py @@ -24,7 +24,7 @@ ############################################################################## from spack import * - +import glob, sys, os class Metis(Package): """ @@ -36,7 +36,10 @@ class Metis(Package): homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview' url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz" - version('5.1.0', '5465e67079419a69e0116de24fce58fe') + version('5.1.0', '5465e67079419a69e0116de24fce58fe', + url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz') + version('4.0.3', '5efa35de80703c1b2c4d0de080fafbcf4e0d363a21149a1ad2f96e0144841a55', + url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD/metis-4.0.3.tar.gz') variant('shared', default=True, description='Enables the build of shared libraries') variant('debug', default=False, description='Builds the library in debug mode') @@ -45,10 +48,85 @@ class Metis(Package): variant('idx64', default=False, description='Use int64_t as default index type') variant('double', default=False, description='Use double precision floating point types') - depends_on('cmake @2.8:') # build-time dependency - + depends_on('cmake @2.8:', when='@5:') # build-time dependency depends_on('gdb', when='+gdb') + patch('install_gklib_defs_rename.patch', when='@5:') + + + @when('@4:4.0.3') + def install(self, spec, prefix): + + if '+gdb' in spec: + raise InstallError('gdb support not implemented in METIS 4!') + if '+idx64' in spec: + raise InstallError('idx64 option not implemented in METIS 4!') + if '+double' in spec: + raise InstallError('double option not implemented for METIS 4!') + + options = ['COPTIONS=-fPIC'] + if '+debug' in spec: + options.append('OPTFLAGS=-g -O0') + make(*options) + + mkdir(prefix.bin) + for x in ('pmetis', 'kmetis', 'oemetis', 'onmetis', 'partnmesh', + 'partdmesh', 'mesh2nodal', 'mesh2dual', 'graphchk'): + install(x, prefix.bin) + + mkdir(prefix.lib) + install('libmetis.a', prefix.lib) + + mkdir(prefix.include) + for h in glob.glob(join_path('Lib', '*.h')): + install(h, prefix.include) + + mkdir(prefix.share) + for f in (join_path(*p) + for p in (('Programs', 'io.c'), + ('Test','mtest.c'), + ('Graphs','4elt.graph'), + ('Graphs', 'metis.mesh'), + ('Graphs', 'test.mgraph'))): + install(f, prefix.share) + + if '+shared' in spec: + if sys.platform == 'darwin': + lib_dsuffix = 'dylib' + load_flag = '-Wl,-all_load' + no_load_flag = '' + else: + lib_dsuffix = 'so' + load_flag = '-Wl,-whole-archive' + no_load_flag = '-Wl,-no-whole-archive' + + os.system(spack_cc + ' -fPIC -shared ' + load_flag + + ' libmetis.a ' + 
no_load_flag + ' -o libmetis.' +
+                      lib_dsuffix)
+            install('libmetis.' + lib_dsuffix, prefix.lib)
+
+        # Set up and run tests on installation
+        symlink(join_path(prefix.share, 'io.c'), 'io.c')
+        symlink(join_path(prefix.share, 'mtest.c'), 'mtest.c')
+        os.system(spack_cc + ' -I%s' % prefix.include + ' -c io.c')
+        os.system(spack_cc + ' -I%s' % prefix.include +
+                  ' -L%s' % prefix.lib + ' -lmetis mtest.c io.o -o mtest')
+        _4eltgraph = join_path(prefix.share, '4elt.graph')
+        test_mgraph = join_path(prefix.share, 'test.mgraph')
+        metis_mesh = join_path(prefix.share, 'metis.mesh')
+        kmetis = join_path(prefix.bin, 'kmetis')
+        os.system('./mtest ' + _4eltgraph)
+        os.system(kmetis + ' ' + _4eltgraph + ' 40')
+        os.system(join_path(prefix.bin, 'onmetis') + ' ' + _4eltgraph)
+        os.system(join_path(prefix.bin, 'pmetis') + ' ' + test_mgraph + ' 2')
+        os.system(kmetis + ' ' + test_mgraph + ' 2')
+        os.system(kmetis + ' ' + test_mgraph + ' 5')
+        os.system(join_path(prefix.bin, 'partnmesh') + ' ' + metis_mesh + ' 10')
+        os.system(join_path(prefix.bin, 'partdmesh') + ' ' + metis_mesh + ' 10')
+        os.system(join_path(prefix.bin, 'mesh2dual') + ' ' + metis_mesh)
+
+
+    @when('@5:')
     def install(self, spec, prefix):
         options = []
@@ -77,7 +155,36 @@ def install(self, spec, prefix):
         if '+double' in spec:
             filter_file('REALTYPEWIDTH 32', 'REALTYPEWIDTH 64', metis_header)
 
+        # Make clang 7.3 happy.
+        # Prevents "ld: section __DATA/__thread_bss extends beyond end of file"
+        # See upstream LLVM issue https://llvm.org/bugs/show_bug.cgi?id=27059
+        # Adopted from https://github.com/Homebrew/homebrew-science/blob/master/metis.rb
+        if spec.satisfies('%clang@7.3.0'):
+            filter_file('#define MAX_JBUFS 128', '#define MAX_JBUFS 24', join_path(source_directory, 'GKlib', 'error.c'))
+
         with working_dir(build_directory, create=True):
             cmake(source_directory, *options)
             make()
-            make("install")
\ No newline at end of file
+            make("install")
+            # now run some tests:
+            for f in ["4elt", "copter2", "mdual"]:
+                graph = join_path(source_directory,'graphs','%s.graph' % f)
+                Executable(join_path(prefix.bin,'graphchk'))(graph)
+                Executable(join_path(prefix.bin,'gpmetis'))(graph,'2')
+                Executable(join_path(prefix.bin,'ndmetis'))(graph)
+
+            graph = join_path(source_directory,'graphs','test.mgraph')
+            Executable(join_path(prefix.bin,'gpmetis'))(graph,'2')
+            graph = join_path(source_directory,'graphs','metis.mesh')
+            Executable(join_path(prefix.bin,'mpmetis'))(graph,'2')
+
+            # install GKlib headers, which will be needed for ParMETIS
+            GKlib_dist = join_path(prefix.include,'GKlib')
+            mkdirp(GKlib_dist)
+            fs = glob.glob(join_path(source_directory,'GKlib',"*.h"))
+            for f in fs:
+                install(f, GKlib_dist)
+
+            # The shared library is not installed correctly on Darwin; correct this
+            if (sys.platform == 'darwin') and ('+shared' in spec):
+                fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index b20dc8dd60cce764851683105ddc34a614055b48..b317ec6651dbc5412bf5be18e9ad007f459430eb 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -47,12 +47,12 @@ class Mpich(Package):
     provides('mpi@:3.0', when='@3:')
     provides('mpi@:1.3', when='@1:')
 
-    def setup_dependent_environment(self, env, dependent_spec):
-        env.set('MPICH_CC', spack_cc)
-        env.set('MPICH_CXX', spack_cxx)
-        env.set('MPICH_F77', spack_f77)
-        env.set('MPICH_F90', spack_f90)
-        env.set('MPICH_FC', spack_fc)
+    def setup_dependent_environment(self, spack_env, run_env,
dependent_spec): + spack_env.set('MPICH_CC', spack_cc) + spack_env.set('MPICH_CXX', spack_cxx) + spack_env.set('MPICH_F77', spack_f77) + spack_env.set('MPICH_F90', spack_fc) + spack_env.set('MPICH_FC', spack_fc) def setup_dependent_package(self, module, dep_spec): """For dependencies, make mpicc's use spack wrapper.""" @@ -78,6 +78,9 @@ def install(self, spec, prefix): if not self.compiler.fc: config_args.append("--disable-fc") + if not self.compiler.fc and not self.compiler.f77: + config_args.append("--disable-fortran") + configure(*config_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch b/var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch new file mode 100644 index 0000000000000000000000000000000000000000..53294fbbc63d2e54fc84ead56f833f0476d23b44 --- /dev/null +++ b/var/spack/repos/builtin/packages/mrnet/krell-5.0.1.patch @@ -0,0 +1,154 @@ +--- mrnet-3093918/include/mrnet/Types.h 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/include/mrnet/Types.h 2016-03-16 12:29:33.986132302 -0700 +@@ -23,7 +23,7 @@ + #ifndef MRNET_VERSION_MAJOR + # define MRNET_VERSION_MAJOR 5 + # define MRNET_VERSION_MINOR 0 +-# define MRNET_VERSION_REV 0 ++# define MRNET_VERSION_REV 1 + #endif + + namespace MRN +--- mrnet-3093918/include/mrnet_lightweight/Types.h 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/include/mrnet_lightweight/Types.h 2016-03-16 12:29:33.987132302 -0700 +@@ -30,7 +30,7 @@ + #ifndef MRNET_VERSION_MAJOR + #define MRNET_VERSION_MAJOR 5 + #define MRNET_VERSION_MINOR 0 +-#define MRNET_VERSION_REV 0 ++#define MRNET_VERSION_REV 1 + #endif + void get_Version(int* major, + int* minor, +--- mrnet-3093918/src/lightweight/SerialGraph.c 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/src/lightweight/SerialGraph.c 2016-03-16 12:29:33.995132302 -0700 +@@ -59,7 +59,7 @@ + + mrn_dbg_func_begin(); + +- sprintf(hoststr, "[%s:%hu:%u:", ihostname, iport, irank); ++ sprintf(hoststr, "[%s:%05hu:%u:", ihostname, iport, irank); + mrn_dbg(5, mrn_printf(FLF, stderr, "looking for SubTreeRoot: '%s'\n", hoststr)); + + byte_array = sg->byte_array; +@@ -110,7 +110,7 @@ + + mrn_dbg_func_begin(); + +- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:0]", ihostname, iport, irank); ++ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:0]", ihostname, iport, irank); + mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree leaf: %s\n", hoststr)); + + len += strlen(sg->byte_array) + 1; +@@ -139,7 +139,7 @@ + + mrn_dbg_func_begin(); + +- len = (size_t) sprintf(hoststr, "[%s:%hu:%u:1", ihostname, iport, irank); ++ len = (size_t) sprintf(hoststr, "[%s:%05hu:%u:1", ihostname, iport, irank); + mrn_dbg(5, mrn_printf(FLF, stderr, "adding sub tree root: %s\n", hoststr)); + + len += strlen(sg->byte_array) + 1; +@@ -360,8 +360,8 @@ + char old_hoststr[256]; + char new_hoststr[256]; + +- sprintf(old_hoststr, "[%s:%hu:%u:", hostname, UnknownPort, irank); +- sprintf(new_hoststr, "[%s:%hu:%u:", hostname, port, irank); ++ sprintf(old_hoststr, "[%s:%05hu:%u:", hostname, UnknownPort, irank); ++ sprintf(new_hoststr, "[%s:%05hu:%u:", hostname, port, irank); + + old_byte_array = sg->byte_array; + new_byte_array = (char*) malloc( strlen(old_byte_array) + 10 ); +--- mrnet-3093918/xplat/src/lightweight/SocketUtils.c 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/xplat/src/lightweight/SocketUtils.c 2016-03-16 12:29:34.006132303 -0700 +@@ -15,7 +15,7 @@ + #else + const XPlat_Socket InvalidSocket = INVALID_SOCKET; + #endif +-const XPlat_Port InvalidPort = 
(XPlat_Port)-1; ++const XPlat_Port InvalidPort = (XPlat_Port)0; + + static bool_t SetTcpNoDelay( XPlat_Socket sock ) + { +--- mrnet-3093918/conf/configure.in 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/conf/configure.in 2016-03-16 12:45:54.573196781 -0700 +@@ -107,6 +107,18 @@ + AC_SUBST(PURIFY) + + ++AC_ARG_WITH(expat, ++ [AS_HELP_STRING([--with-expat=PATH], ++ [Absolute path to installation of EXPAT libraries (note: specify the path to the directory containing "include" and "lib" sub-directories)])], ++ [EXPAT_DIR="${withval}"], ++ [EXPAT_DIR=""]) ++ ++if test "x$EXPAT_DIR" = "x" ; then ++ EXPAT_LIB="" ++else ++ EXPAT_LIB="-L$EXPAT_DIR/lib" ++fi ++ + dnl === Checks for header files. + AC_CHECK_HEADERS([assert.h errno.h fcntl.h limits.h netdb.h signal.h stddef.h stdlib.h stdio.h string.h unistd.h arpa/inet.h netinet/in.h sys/ioctl.h sys/socket.h sys/sockio.h sys/time.h]) + AC_HEADER_STDBOOL +@@ -432,7 +444,7 @@ + CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps" + CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic" + CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps" +- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic" ++ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic" + + AC_CHECK_LIB( [alps], [alps_launch_tool_helper], + [HAVE_ATH_LIBS="yes"; EXTRA_LIBS="$CRAYXT_ATH_LIBS $EXTRA_LIBS"; EXTRA_LIBS_SO="$CRAYXT_ATH_LIBS_SO $EXTRA_LIBS_SO"], +--- mrnet-3093918/configure 2015-12-10 09:32:24.000000000 -0800 ++++ mrnet_top_of_tree/configure 2016-03-16 13:47:20.386439143 -0700 +@@ -742,6 +742,7 @@ + enable_debug + enable_ltwt_threadsafe + with_purify ++with_expat + ' + ac_precious_vars='build_alias + host_alias +@@ -1399,6 +1400,9 @@ + containing "include" and "lib" sub-directories) + --with-launchmon=PATH Absolute path to installation of LaunchMON + --with-purify Use purify for memory debugging ++ --with-expat=PATH Absolute path to installation of EXPAT libraries ++ (note: specify the path to the directory containing ++ "include" and "lib" sub-directories) + + Some influential environment variables: + CC C compiler command +@@ -3541,6 +3545,21 @@ + + + ++# Check whether --with-expat was given. ++if test "${with_expat+set}" = set; then : ++ withval=$with_expat; EXPAT_DIR="${withval}" ++else ++ EXPAT_DIR="" ++fi ++ ++ ++if test "x$EXPAT_DIR" = "x" ; then ++ EXPAT_LIB="" ++else ++ EXPAT_LIB="-L$EXPAT_DIR/lib" ++fi ++ ++ + ac_ext=cpp + ac_cpp='$CXXCPP $CPPFLAGS' + ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' +@@ -5473,7 +5492,7 @@ + CRAYXT_ATH_LIBS_SO="$CRAYXT_ATH_LIBS -lalps" + CRAYXT_ATH_LIBS="$CRAYXT_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc -Wl,-Bdynamic" + CRAYXE_ATH_LIBS_SO="$CRAYXE_ATH_LIBS -lalps" +- CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi -lexpat -Wl,-Bdynamic" ++ CRAYXE_ATH_LIBS="$CRAYXE_ATH_LIBS -Wl,-Bstatic -lalps -lxmlrpc-epi $EXPAT_LIB -lexpat -Wl,-Bdynamic" + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for alps_launch_tool_helper in -lalps" >&5 + $as_echo_n "checking for alps_launch_tool_helper in -lalps... 
" >&6; } diff --git a/var/spack/repos/builtin/packages/mrnet/package.py b/var/spack/repos/builtin/packages/mrnet/package.py index fed944e45f30b5e7c11e5e0d610c489177fe2fdb..a3abb71285da720817f7b2415c0a63f14af49ec3 100644 --- a/var/spack/repos/builtin/packages/mrnet/package.py +++ b/var/spack/repos/builtin/packages/mrnet/package.py @@ -3,11 +3,17 @@ class Mrnet(Package): """The MRNet Multi-Cast Reduction Network.""" homepage = "http://paradyn.org/mrnet" - url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz" + url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_5.0.1.tar.gz" + list_url = "http://ftp.cs.wisc.edu/paradyn/mrnet" - version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') - version('4.1.0', '5a248298b395b329e2371bf25366115c') version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd') + version('4.1.0', '5a248298b395b329e2371bf25366115c') + version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') + + # Add a patch that brings mrnet-5.0.1 up to date with the current development tree + # The development tree contains fixes needed for the krell based tools + variant('krellpatch', default=False, description="Build MRNet with krell openspeedshop based patch.") + patch('krell-5.0.1.patch', when='@5.0.1+krellpatch') variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries") parallel = False diff --git a/var/spack/repos/builtin/packages/mumps/Makefile.inc b/var/spack/repos/builtin/packages/mumps/Makefile.inc index 2e6a041878ce35d1344fa0cf759dff0435621dd8..22d8f5518a52ab2ad6697b25d025b782c21b868e 100644 --- a/var/spack/repos/builtin/packages/mumps/Makefile.inc +++ b/var/spack/repos/builtin/packages/mumps/Makefile.inc @@ -8,12 +8,9 @@ IORDERINGSF = $(ISCOTCH) IORDERINGSC = $(IMETIS) $(IPORD) $(ISCOTCH) PLAT = -LIBEXT = .a -OUTC = -o +OUTC = -o OUTF = -o RM = /bin/rm -f -AR = ar vr -RANLIB = ranlib INCSEQ = -I$(topdir)/libseq LIBSEQ = -L$(topdir)/libseq -lmpiseq diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 5c120c37df8758d39cb9f14340b605c4bed9644e..58f790ec32bcfa5c1e30cfd8b9a6516ce3a16253 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -1,6 +1,5 @@ from spack import * -import os - +import os, sys, glob class Mumps(Package): """MUMPS: a MUltifrontal Massively Parallel sparse direct Solver""" @@ -19,11 +18,12 @@ class Mumps(Package): variant('float', default=True, description='Activate the compilation of smumps') variant('complex', default=True, description='Activate the compilation of cmumps and/or zmumps') variant('idx64', default=False, description='Use int64_t/integer*8 as default index type') + variant('shared', default=True, description='Build shared libraries') depends_on('scotch + esmumps', when='~ptscotch+scotch') depends_on('scotch + esmumps + mpi', when='+ptscotch') - depends_on('metis', when='+metis') + depends_on('metis@5:', when='+metis') depends_on('parmetis', when="+parmetis") depends_on('blas') depends_on('lapack') @@ -70,6 +70,9 @@ def write_makefile_inc(self): makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings))) + # when building shared libs need -fPIC, otherwise + # /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1' can not be used when making a shared object; recompile with -fPIC + fpic = '-fPIC' if '+shared' in self.spec else '' # TODO: test this part, it needs a full blas, scalapack and # partitionning environment with 64bit integers if '+idx64' in self.spec: @@ 
-77,14 +80,14 @@ def write_makefile_inc(self): # the fortran compilation flags most probably are # working only for intel and gnu compilers this is # perhaps something the compiler should provide - ['OPTF = -O -DALLOW_NON_INIT %s' % '-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8', - 'OPTL = -O ', - 'OPTC = -O -DINTSIZE64']) + ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic,'-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), + 'OPTL = %s -O ' % fpic, + 'OPTC = %s -O -DINTSIZE64' % fpic]) else: makefile_conf.extend( - ['OPTF = -O -DALLOW_NON_INIT', - 'OPTL = -O ', - 'OPTC = -O ']) + ['OPTF = %s -O -DALLOW_NON_INIT' % fpic, + 'OPTL = %s -O ' % fpic, + 'OPTC = %s -O ' % fpic]) if '+mpi' in self.spec: @@ -105,6 +108,27 @@ def write_makefile_inc(self): # compiler possible values are -DAdd_, -DAdd__ and/or -DUPPER makefile_conf.append("CDEFS = -DAdd_") + if '+shared' in self.spec: + if sys.platform == 'darwin': + # Building dylibs with mpif90 causes segfaults on 10.8 and 10.10. Use gfortran. (Homebrew) + makefile_conf.extend([ + 'LIBEXT=.dylib', + 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'],prefix.lib), + 'RANLIB=echo' + ]) + else: + makefile_conf.extend([ + 'LIBEXT=.so', + 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, + 'RANLIB=echo' + ]) + else: + makefile_conf.extend([ + 'LIBEXT = .a', + 'AR = ar vr', + 'RANLIB = ranlib' + ]) + makefile_inc_template = join_path(os.path.dirname(self.module.__file__), 'Makefile.inc') @@ -121,7 +145,7 @@ def write_makefile_inc(self): def install(self, spec, prefix): make_libs = [] - # the coice to compile ?examples is to have kind of a sanity + # the choice to compile ?examples is to have kind of a sanity # check on the libraries generated. 
if '+float' in spec: make_libs.append('sexamples') @@ -135,9 +159,27 @@ def install(self, spec, prefix): self.write_makefile_inc() - make(*make_libs) + # Build fails in parallel + make(*make_libs, parallel=False) install_tree('lib', prefix.lib) install_tree('include', prefix.include) - if '~mpi' in spec: - install('libseq/libmpiseq.a', prefix.lib) + + if '~mpi' in spec: + lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so' + lib_suffix = lib_dsuffix if '+shared' in spec else '.a' + install('libseq/libmpiseq%s' % lib_suffix, prefix.lib) + for f in glob.glob(join_path('libseq','*.h')): + install(f, prefix.include) + + # FIXME: extend the tests to mpirun -np 2 (or alike) when build with MPI + # FIXME: use something like numdiff to compare blessed output with the current + with working_dir('examples'): + if '+float' in spec: + os.system('./ssimpletest < input_simpletest_real') + if '+complex' in spec: + os.system('./csimpletest < input_simpletest_real') + if '+double' in spec: + os.system('./dsimpletest < input_simpletest_real') + if '+complex' in spec: + os.system('./zsimpletest < input_simpletest_cmplx') diff --git a/var/spack/repos/builtin/packages/muparser/package.py b/var/spack/repos/builtin/packages/muparser/package.py new file mode 100644 index 0000000000000000000000000000000000000000..19ca8ce28744a84f7364634453897496342be435 --- /dev/null +++ b/var/spack/repos/builtin/packages/muparser/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Muparser(Package): + """C++ math expression parser library.""" + homepage = "http://muparser.beltoforion.de/" + url = "https://github.com/beltoforion/muparser/archive/v2.2.5.tar.gz" + + version('2.2.5', '02dae671aa5ad955fdcbcd3fee313fb7') + + def install(self, spec, prefix): + options = ['--disable-debug', + '--disable-dependency-tracking', + '--prefix=%s' % prefix] + + configure(*options) + + make(parallel=False) + make("install") diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py index e4e95f92af7f1a2edd90a803a954370e637795d1..3e60b517dbecab198ce33912e27c33d0e31eb506 100644 --- a/var/spack/repos/builtin/packages/mvapich2/package.py +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -140,6 +140,13 @@ def set_network_type(self, spec, configure_args): configure_args.extend(network_options) + def setup_dependent_environment(self, spack_env, run_env, extension_spec): + spack_env.set('MPICH_CC', spack_cc) + spack_env.set('MPICH_CXX', spack_cxx) + spack_env.set('MPICH_F77', spack_f77) + spack_env.set('MPICH_F90', spack_fc) + spack_env.set('MPICH_FC', spack_fc) + def install(self, spec, prefix): # we'll set different configure flags depending on our environment configure_args = [ diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py index 8dc808caaccafe5d51ec14e2232262e4990ecb3e..219fbce226e03aaaf348d211b8f5cc310fc19892 100644 --- a/var/spack/repos/builtin/packages/ncurses/package.py +++ b/var/spack/repos/builtin/packages/ncurses/package.py @@ -8,11 +8,10 @@ class Ncurses(Package): """ homepage = "http://invisible-island.net/ncurses/ncurses.html" + url = "http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz" - version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1', - url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz') - version('6.0', 'ee13d052e1ead260d7c28071f46eefb1', - url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz') + version('6.0', 'ee13d052e1ead260d7c28071f46eefb1') + 
version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1') patch('patch_gcc_5.txt', when='%gcc@5.0:') diff --git a/var/spack/repos/builtin/packages/netcdf-cxx/package.py b/var/spack/repos/builtin/packages/netcdf-cxx/package.py new file mode 100644 index 0000000000000000000000000000000000000000..8aa1d8b236e079d745c6d370fe607023519ec8ae --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf-cxx/package.py @@ -0,0 +1,19 @@ +from spack import * + +class NetcdfCxx(Package): + """Deprecated C++ compatibility bindings for NetCDF. + These do NOT read or write NetCDF-4 files, and are no longer + maintained by Unidata. Developers should migrate to current + NetCDF C++ bindings, in Spack package netcdf-cxx4.""" + + homepage = "http://www.unidata.ucar.edu/software/netcdf" + url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-cxx-4.2.tar.gz" + + version('4.2', 'd32b20c00f144ae6565d9e98d9f6204c') + + depends_on('netcdf') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py index 227362399a9c3184ad44b73c4655d6b7e96eaf14..b60a2c4e9a37cd6f8182f4a763d669c2eaa14c52 100644 --- a/var/spack/repos/builtin/packages/netcdf/package.py +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -43,6 +43,13 @@ def install(self, spec, prefix): "--enable-dap" ] + # Make sure Netcdf links against Spack's curl + # Otherwise it may pick up system's curl, which could lead to link errors: + # /usr/lib/x86_64-linux-gnu/libcurl.so: undefined reference to `SSL_CTX_use_certificate_chain_file@OPENSSL_1.0.0' + LIBS.append("-lcurl") + CPPFLAGS.append("-I%s" % spec['curl'].prefix.include) + LDFLAGS.append ("-L%s" % spec['curl'].prefix.lib) + if '+mpi' in spec: config_args.append('--enable-parallel4') diff --git a/var/spack/repos/builtin/packages/netlib-blas/package.py b/var/spack/repos/builtin/packages/netlib-blas/package.py deleted file mode 100644 index 85e97323d354674e225f70a1f6da16b739bf5b45..0000000000000000000000000000000000000000 --- a/var/spack/repos/builtin/packages/netlib-blas/package.py +++ /dev/null @@ -1,46 +0,0 @@ -from spack import * -import os - - -class NetlibBlas(Package): - """Netlib reference BLAS""" - homepage = "http://www.netlib.org/lapack/" - url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" - - version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') - - variant('fpic', default=False, description="Build with -fpic compiler option") - - # virtual dependency - provides('blas') - - # Doesn't always build correctly in parallel - parallel = False - - def patch(self): - os.symlink('make.inc.example', 'make.inc') - - mf = FileFilter('make.inc') - mf.filter('^FORTRAN.*', 'FORTRAN = f90') - mf.filter('^LOADER.*', 'LOADER = f90') - mf.filter('^CC =.*', 'CC = cc') - - if '+fpic' in self.spec: - mf.filter('^OPTS.*=.*', 'OPTS = -O2 -frecursive -fpic') - mf.filter('^CFLAGS =.*', 'CFLAGS = -O3 -fpic') - - - def install(self, spec, prefix): - make('blaslib') - - # Tests that blas builds correctly - make('blas_testing') - - # No install provided - mkdirp(prefix.lib) - install('librefblas.a', prefix.lib) - - # Blas virtual package should provide blas.a and libblas.a - with working_dir(prefix.lib): - symlink('librefblas.a', 'blas.a') - symlink('librefblas.a', 'libblas.a') diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py index 
78c5a053fe505ea6d79038ab4c5e165de3b0d485..f70e634347c63e3f9b40a08e46dc00b34c59d9d4 100644 --- a/var/spack/repos/builtin/packages/netlib-lapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py @@ -1,16 +1,15 @@ from spack import * + class NetlibLapack(Package): """ - LAPACK version 3.X is a comprehensive FORTRAN library that does - linear algebra operations including matrix inversions, least - squared solutions to linear sets of equations, eigenvector - analysis, singular value decomposition, etc. It is a very - comprehensive and reputable package that has found extensive - use in the scientific community. + LAPACK version 3.X is a comprehensive FORTRAN library that does linear algebra operations including matrix + inversions, least squared solutions to linear sets of equations, eigenvector analysis, singular value + decomposition, etc. It is a very comprehensive and reputable package that has found extensive use in the + scientific community. """ homepage = "http://www.netlib.org/lapack/" - url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" + url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" version('3.6.0', 'f2f6c67134e851fe189bb3ca1fbb5101') version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') @@ -19,41 +18,67 @@ class NetlibLapack(Package): version('3.4.0', '02d5706ec03ba885fc246e5fa10d8c70') version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4') - variant('shared', default=False, description="Build shared library version") - variant('fpic', default=False, description="Build with -fpic compiler option") + variant('debug', default=False, description='Activates the Debug build type') + variant('shared', default=True, description="Build shared library version") + variant('external-blas', default=False, description='Build lapack with an external blas') + + variant('lapacke', default=True, description='Activates the build of the LAPACKE C interface') # virtual dependency + provides('blas', when='~external-blas') provides('lapack') - # blas is a virtual dependency. - depends_on('blas') depends_on('cmake') + depends_on('blas', when='+external-blas') + + + def patch(self): + # Fix cblas CMakeLists.txt -- has wrong case for subdirectory name. 
+ if self.spec.satisfies('@3.6.0:'): + filter_file('${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/', + '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', 'CBLAS/CMakeLists.txt', string=True) - # Doesn't always build correctly in parallel - parallel = False + def install_one(self, spec, prefix, shared): + cmake_args = ['-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'), + '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), + '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')] + if spec.satisfies('@3.6.0:'): + cmake_args.extend(['-DCBLAS=ON']) # always build CBLAS - @when('^netlib-blas') - def get_blas_libs(self): - blas = self.spec['netlib-blas'] - return [join_path(blas.prefix.lib, 'blas.a')] + if '+external-blas' in spec: + # TODO : the mechanism to specify the library should be more general, + # TODO : but this allows to have an hook to an external blas + cmake_args.extend([ + '-DUSE_OPTIMIZED_BLAS:BOOL=ON', + '-DBLAS_LIBRARIES:PATH=%s' % join_path(spec['blas'].prefix.lib, 'libblas.a') + ]) + + cmake_args.extend(std_cmake_args) + + build_dir = 'spack-build' + ('-shared' if shared else '-static') + with working_dir(build_dir, create=True): + cmake('..', *cmake_args) + make() + make("install") - @when('^atlas') - def get_blas_libs(self): - blas = self.spec['atlas'] - return [join_path(blas.prefix.lib, l) - for l in ('libf77blas.a', 'libatlas.a')] def install(self, spec, prefix): - blas_libs = ";".join(self.get_blas_libs()) - cmake_args = [".", '-DBLAS_LIBRARIES=' + blas_libs] + # Always build static libraries. + self.install_one(spec, prefix, False) + # Build shared libraries if requested. if '+shared' in spec: - cmake_args.append('-DBUILD_SHARED_LIBS=ON') - if '+fpic' in spec: - cmake_args.append('-DCMAKE_POSITION_INDEPENDENT_CODE=ON') + self.install_one(spec, prefix, True) + + + def setup_dependent_package(self, module, dspec): + # This is WIP for a prototype interface for virtual packages. + # We can update this as more builds start depending on BLAS/LAPACK. 
+ libdir = find_library_path('libblas.a', self.prefix.lib64, self.prefix.lib) - cmake_args += std_cmake_args + self.spec.blas_static_lib = join_path(libdir, 'libblas.a') + self.spec.lapack_static_lib = join_path(libdir, 'liblapack.a') - cmake(*cmake_args) - make() - make("install") + if '+shared' in self.spec: + self.spec.blas_shared_lib = join_path(libdir, 'libblas.%s' % dso_suffix) + self.spec.lapack_shared_lib = join_path(libdir, 'liblapack.%s' % dso_suffix) diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py index c3e6822cdfe9aae45083d3805c3bf3a227182c53..276876d197b3dcef556252484231c1449dc3e7eb 100644 --- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py +++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py @@ -18,6 +18,7 @@ class NetlibScalapack(Package): provides('scalapack') + depends_on('cmake') depends_on('mpi') depends_on('lapack') @@ -41,6 +42,11 @@ def install(self, spec, prefix): make() make("install") + # The shared libraries are not installed correctly on Darwin; correct this + if (sys.platform == 'darwin') and ('+shared' in spec): + fix_darwin_install_name(prefix.lib) + + def setup_dependent_package(self, module, dependent_spec): spec = self.spec lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so' diff --git a/var/spack/repos/builtin/packages/ninja/package.py b/var/spack/repos/builtin/packages/ninja/package.py index 9e6bf4e358dc1786c7fc8783bd495543202f235a..0722dd49a64ffef2e347fbffbd04b972bb22e8c3 100644 --- a/var/spack/repos/builtin/packages/ninja/package.py +++ b/var/spack/repos/builtin/packages/ninja/package.py @@ -16,7 +16,7 @@ def install(self, spec, prefix): cp = which('cp') - bindir = os.path.join(prefix, 'bin') + bindir = os.path.join(prefix, 'bin/') mkdir(bindir) - cp('-a', '-t', bindir, 'ninja') - cp('-ra', 'misc', prefix) + cp('-a', 'ninja', bindir) + cp('-a', 'misc', prefix) diff --git a/var/spack/repos/builtin/packages/numdiff/package.py b/var/spack/repos/builtin/packages/numdiff/package.py new file mode 100644 index 0000000000000000000000000000000000000000..e72c60fadbe25d0067f2f63a269ce38b51e98b29 --- /dev/null +++ b/var/spack/repos/builtin/packages/numdiff/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import sys
+
+class Numdiff(Package):
+    """Numdiff is a little program that can be used to compare putatively
+    similar files line by line and field by field, ignoring small numeric
+    differences and/or different numeric formats."""
+
+    homepage = 'https://www.nongnu.org/numdiff'
+    url = 'http://nongnu.askapache.com/numdiff/numdiff-5.8.1.tar.gz'
+
+    version('5.8.1', 'a295eb391f6cb1578209fc6b4f9d994e')
+
+    depends_on('gettext', sys.platform=='darwin')
+
+    def install(self, spec, prefix):
+        options = ['--prefix=%s' % prefix]
+        configure(*options)
+        make()
+        make('install')
diff --git a/var/spack/repos/builtin/packages/oce/null.patch b/var/spack/repos/builtin/packages/oce/null.patch
new file mode 100644
index 0000000000000000000000000000000000000000..42a3f0e44f33466a12a4ab52d6e79ab0efd67f20
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oce/null.patch
@@ -0,0 +1,482 @@
+From 61cb965b9ffeca419005bc15e635e67589c421dd Mon Sep 17 00:00:00 2001
+From: Martin Siggel <martin.siggel@dlr.de>
+Date: Thu, 28 Jan 2016 19:05:00 +0100
+Subject: [PATCH] Workaround clang optimizations for null references
+
+OCCT/OCE includes some evil code that uses NULL references,
+which are normally not possible. Clang removes code in
+branches like if(&myNullRef==NULL) as it assumes this can
+never be true. This fix was inspired from the mantis issue
+http://tracker.dev.opencascade.org/view.php?id=26042. This
+code will be fixed in OCCT 7, but we might require the fix
+for earlier releases as well.
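+
+For example, a guard of the form
+
+  if (&Weights == NULL) return;   // clang folds this test to 'false'
+
+may be removed entirely under optimization, because forming a null
+reference is already undefined behaviour.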
+ +Fixes issue #576 +--- + inc/PLib.hxx | 2 +- + src/BSplCLib/BSplCLib.cxx | 16 ++++++------- + src/BSplCLib/BSplCLib_2.cxx | 6 ++--- + src/BSplCLib/BSplCLib_CurveComputation.gxx | 26 ++++++++++----------- + src/BSplSLib/BSplSLib.cxx | 36 +++++++++++++++--------------- + src/BSplSLib/BSplSLib_BzSyntaxes.cxx | 2 +- + src/PLib/PLib.cxx | 10 ++++----- + 7 files changed, 49 insertions(+), 49 deletions(-) + +diff --git a/inc/PLib.hxx b/inc/PLib.hxx +index 7513234..52b1f84 100644 +--- a/inc/PLib.hxx ++++ b/inc/PLib.hxx +@@ -343,6 +343,6 @@ friend class PLib_DoubleJacobiPolynomial; + + + +- ++#define IS_NULL_REF(ref) ((reinterpret_cast<size_t>(&ref) & 0xFFFFFF) == 0) + + #endif // _PLib_HeaderFile +diff --git a/src/BSplCLib/BSplCLib.cxx b/src/BSplCLib/BSplCLib.cxx +index 683e4ab..2a2d9ea 100644 +--- a/src/BSplCLib/BSplCLib.cxx ++++ b/src/BSplCLib/BSplCLib.cxx +@@ -298,7 +298,7 @@ void BSplCLib::LocateParameter + Standard_Real& NewU) + { + Standard_Integer first,last; +- if (&Mults) { ++ if (!IS_NULL_REF(Mults)) { + if (Periodic) { + first = Knots.Lower(); + last = Knots.Upper(); +@@ -1434,7 +1434,7 @@ void BSplCLib::BuildKnots(const Standard_Integer Degree, + const Standard_Real * pkn = &Knots(KLower); + pkn -= KLower; + Standard_Real *knot = &LK; +- if (&Mults == NULL) { ++ if (IS_NULL_REF(Mults)) { + switch (Degree) { + case 1 : { + Standard_Integer j = Index ; +@@ -1672,7 +1672,7 @@ Standard_Boolean BSplCLib::PrepareInsertKnots + const Standard_Real Tolerance, + const Standard_Boolean Add) + { +- Standard_Boolean addflat = &AddMults == NULL; ++ Standard_Boolean addflat = IS_NULL_REF(AddMults); + + Standard_Integer first,last; + if (Periodic) { +@@ -1856,7 +1856,7 @@ void BSplCLib::InsertKnots + const Standard_Real Tolerance, + const Standard_Boolean Add) + { +- Standard_Boolean addflat = &AddMults == NULL; ++ Standard_Boolean addflat = IS_NULL_REF(AddMults); + + Standard_Integer i,k,mult,firstmult; + Standard_Integer index,kn,curnk,curk; +@@ -3902,7 +3902,7 @@ void BSplCLib::Resolution( Standard_Real& Poles, + num_poles = FlatKnots.Length() - Deg1; + switch (ArrayDimension) { + case 2 : { +- if (&Weights != NULL) { ++ if (!IS_NULL_REF(Weights)) { + const Standard_Real * WG = &Weights(Weights.Lower()); + min_weights = WG[0]; + +@@ -3970,7 +3970,7 @@ void BSplCLib::Resolution( Standard_Real& Poles, + break; + } + case 3 : { +- if (&Weights != NULL) { ++ if (!IS_NULL_REF(Weights)) { + const Standard_Real * WG = &Weights(Weights.Lower()); + min_weights = WG[0]; + +@@ -4047,7 +4047,7 @@ void BSplCLib::Resolution( Standard_Real& Poles, + break; + } + case 4 : { +- if (&Weights != NULL) { ++ if (!IS_NULL_REF(Weights)) { + const Standard_Real * WG = &Weights(Weights.Lower()); + min_weights = WG[0]; + +@@ -4134,7 +4134,7 @@ void BSplCLib::Resolution( Standard_Real& Poles, + } + default : { + Standard_Integer kk; +- if (&Weights != NULL) { ++ if (!IS_NULL_REF(Weights)) { + const Standard_Real * WG = &Weights(Weights.Lower()); + min_weights = WG[0]; + +diff --git a/src/BSplCLib/BSplCLib_2.cxx b/src/BSplCLib/BSplCLib_2.cxx +index 35c4639..653b7cd 100644 +--- a/src/BSplCLib/BSplCLib_2.cxx ++++ b/src/BSplCLib/BSplCLib_2.cxx +@@ -70,7 +70,7 @@ void BSplCLib::BuildEval(const Standard_Integer Degree, + Standard_Integer i; + Standard_Integer ip = PLower + Index - 1; + Standard_Real w, *pole = &LP; +- if (&Weights == NULL) { ++ if (IS_NULL_REF(Weights)) { + + for (i = 0; i <= Degree; i++) { + ip++; +@@ -115,13 +115,13 @@ static void PrepareEval + + // make the knots + 
BSplCLib::BuildKnots(Degree,index,Periodic,Knots,Mults,*dc.knots); +- if (&Mults == NULL) ++ if (IS_NULL_REF(Mults)) + index -= Knots.Lower() + Degree; + else + index = BSplCLib::PoleIndex(Degree,index,Periodic,Mults); + + // check truly rational +- rational = (&Weights != NULL); ++ rational = (!IS_NULL_REF(Weights)); + if (rational) { + Standard_Integer WLower = Weights.Lower() + index; + rational = BSplCLib::IsRational(Weights, WLower, WLower + Degree); +diff --git a/src/BSplCLib/BSplCLib_CurveComputation.gxx b/src/BSplCLib/BSplCLib_CurveComputation.gxx +index e71b4e0..9d42643 100644 +--- a/src/BSplCLib/BSplCLib_CurveComputation.gxx ++++ b/src/BSplCLib/BSplCLib_CurveComputation.gxx +@@ -92,7 +92,7 @@ Standard_Boolean BSplCLib::RemoveKnot + TColStd_Array1OfInteger& NewMults, + const Standard_Real Tolerance) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim; + dim = Dimension_gen; + if (rational) dim++; +@@ -133,7 +133,7 @@ void BSplCLib::InsertKnots + const Standard_Real Epsilon, + const Standard_Boolean Add) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim; + dim = Dimension_gen; + if (rational) dim++; +@@ -222,7 +222,7 @@ void BSplCLib::IncreaseDegree + TColStd_Array1OfReal& NewKnots, + TColStd_Array1OfInteger& NewMults) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim; + dim = Dimension_gen; + if (rational) dim++; +@@ -256,7 +256,7 @@ void BSplCLib::Unperiodize + Array1OfPoints& NewPoles, + TColStd_Array1OfReal& NewWeights) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim; + dim = Dimension_gen; + if (rational) dim++; +@@ -292,7 +292,7 @@ void BSplCLib::Trimming(const Standard_Integer Degree, + Array1OfPoints& NewPoles, + TColStd_Array1OfReal& NewWeights) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim; + dim = Dimension_gen; + if (rational) dim++; +@@ -339,7 +339,7 @@ void BSplCLib::BuildEval(const Standard_Integer Degree, + Standard_Integer PUpper = Poles.Upper(); + Standard_Integer i; + Standard_Integer ip = PLower + Index - 1; +- if (&Weights == NULL) { ++ if (IS_NULL_REF(Weights)) { + for (i = 0; i <= Degree; i++) { + ip++; + if (ip > PUpper) ip = PLower; +@@ -384,13 +384,13 @@ static void PrepareEval + + // make the knots + BSplCLib::BuildKnots(Degree,index,Periodic,Knots,Mults,*dc.knots); +- if (&Mults == NULL) ++ if (IS_NULL_REF(Mults)) + index -= Knots.Lower() + Degree; + else + index = BSplCLib::PoleIndex(Degree,index,Periodic,Mults); + + // check truly rational +- rational = (&Weights != NULL); ++ rational = (!IS_NULL_REF(Weights)); + if (rational) { + Standard_Integer WLower = Weights.Lower() + index; + rational = BSplCLib::IsRational(Weights, WLower, WLower + Degree); +@@ -741,7 +741,7 @@ void BSplCLib::CacheD0(const Standard_Real Parameter, + Degree * Dimension_gen, + PArray[0], + myPoint[0]) ; +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + Standard_Real * + WArray = (Standard_Real *) &WeightsArray(WeightsArray.Lower()) ; + PLib::NoDerivativeEvalPolynomial(NewParameter, +@@ -798,7 +798,7 @@ void BSplCLib::CacheD1(const Standard_Real Parameter, + + ModifyCoords (LocalPDerivatives + Dimension_gen, /= SpanLenght); + +- if (&WeightsArray != NULL) { ++ if 
(!IS_NULL_REF(WeightsArray)) { + Standard_Real * + WArray = (Standard_Real *) &WeightsArray(WeightsArray.Lower()) ; + PLib::EvalPolynomial(NewParameter, +@@ -878,7 +878,7 @@ void BSplCLib::CacheD2(const Standard_Real Parameter, + Index += Dimension_gen; + } + +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + Standard_Real * + WArray = (Standard_Real *) &WeightsArray(WeightsArray.Lower()) ; + +@@ -971,7 +971,7 @@ void BSplCLib::CacheD3(const Standard_Real Parameter, + Index += Dimension_gen; + } + +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + Standard_Real * + WArray = (Standard_Real *) &WeightsArray(WeightsArray.Lower()) ; + +@@ -1081,7 +1081,7 @@ void BSplCLib::BuildCache + LocalValue *= SpanDomain / (Standard_Real) ii ; + } + +- if (&Weights != NULL) { ++ if (!IS_NULL_REF(Weights)) { + for (ii = 1 ; ii <= Degree + 1 ; ii++) + CacheWeights(ii) = 0.0e0 ; + CacheWeights(1) = 1.0e0 ; +diff --git a/src/BSplSLib/BSplSLib.cxx b/src/BSplSLib/BSplSLib.cxx +index 5ad633c..07040d5 100644 +--- a/src/BSplSLib/BSplSLib.cxx ++++ b/src/BSplSLib/BSplSLib.cxx +@@ -309,12 +309,12 @@ static Standard_Boolean PrepareEval (const Standard_Real U, + BSplCLib::BuildKnots(UDegree,uindex,UPer,UKnots,UMults,*dc.knots1); + BSplCLib::BuildKnots(VDegree,vindex,VPer,VKnots,VMults,*dc.knots2); + +- if (&UMults == NULL) ++ if (IS_NULL_REF(UMults)) + uindex -= UKLower + UDegree; + else + uindex = BSplCLib::PoleIndex(UDegree,uindex,UPer,UMults); + +- if (&VMults == NULL) ++ if (IS_NULL_REF(VMults)) + vindex -= VKLower + VDegree; + else + vindex = BSplCLib::PoleIndex(VDegree,vindex,VPer,VMults); +@@ -460,12 +460,12 @@ static Standard_Boolean PrepareEval (const Standard_Real U, + BSplCLib::BuildKnots(UDegree,uindex,UPer,UKnots,UMults,*dc.knots2); + BSplCLib::BuildKnots(VDegree,vindex,VPer,VKnots,VMults,*dc.knots1); + +- if (&UMults == NULL) ++ if (IS_NULL_REF(UMults)) + uindex -= UKLower + UDegree; + else + uindex = BSplCLib::PoleIndex(UDegree,uindex,UPer,UMults); + +- if (&VMults == NULL) ++ if (IS_NULL_REF(VMults)) + vindex -= VKLower + VDegree; + else + vindex = BSplCLib::PoleIndex(VDegree,vindex,VPer,VMults); +@@ -1299,7 +1299,7 @@ void BSplSLib::Iso(const Standard_Real Param, + { + Standard_Integer index = 0; + Standard_Real u = Param; +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim = rational ? 
4 : 3; + + // compute local knots +@@ -1307,7 +1307,7 @@ void BSplSLib::Iso(const Standard_Real Param, + NCollection_LocalArray<Standard_Real> locknots1 (2*Degree); + BSplCLib::LocateParameter(Degree,Knots,Mults,u,Periodic,index,u); + BSplCLib::BuildKnots(Degree,index,Periodic,Knots,Mults,*locknots1); +- if (&Mults == NULL) ++ if (IS_NULL_REF(Mults)) + index -= Knots.Lower() + Degree; + else + index = BSplCLib::PoleIndex(Degree,index,Periodic,Mults); +@@ -1381,7 +1381,7 @@ void BSplSLib::Iso(const Standard_Real Param, + } + + // if the input is not rational but weights are wanted +- if (!rational && (&CWeights != NULL)) { ++ if (!rational && (!IS_NULL_REF(CWeights))) { + + for (i = CWeights.Lower(); i <= CWeights.Upper(); i++) + CWeights(i) = 1.; +@@ -1741,7 +1741,7 @@ void BSplSLib::InsertKnots(const Standard_Boolean UDirection, + const Standard_Real Epsilon, + const Standard_Boolean Add ) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim = 3; + if (rational) dim++; + +@@ -1787,7 +1787,7 @@ Standard_Boolean BSplSLib::RemoveKnot + TColStd_Array1OfInteger& NewMults, + const Standard_Real Tolerance) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim = 3; + if (rational) dim++; + +@@ -1834,7 +1834,7 @@ void BSplSLib::IncreaseDegree + TColStd_Array1OfReal& NewKnots, + TColStd_Array1OfInteger& NewMults) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim = 3; + if (rational) dim++; + +@@ -1876,7 +1876,7 @@ void BSplSLib::Unperiodize + TColgp_Array2OfPnt& NewPoles, + TColStd_Array2OfReal& NewWeights) + { +- Standard_Boolean rational = &Weights != NULL; ++ Standard_Boolean rational = !IS_NULL_REF(Weights); + Standard_Integer dim = 3; + if (rational) dim++; + +@@ -1929,7 +1929,7 @@ void BSplSLib::BuildCache + Standard_Boolean rational,rational_u,rational_v,flag_u_or_v; + Standard_Integer kk,d1,d1p1,d2,d2p1,ii,jj,iii,jjj,Index; + Standard_Real u1,min_degree_domain,max_degree_domain,f,factor[2],u2; +- if (&Weights != NULL) ++ if (!IS_NULL_REF(Weights)) + rational_u = rational_v = Standard_True; + else + rational_u = rational_v = Standard_False; +@@ -2025,7 +2025,7 @@ void BSplSLib::BuildCache + } + factor[0] *= max_degree_domain / (Standard_Real) (iii) ; + } +- if (&Weights != NULL) { ++ if (!IS_NULL_REF(Weights)) { + // + // means that PrepareEval did found out that the surface was + // locally polynomial but since the surface is constructed +@@ -2110,7 +2110,7 @@ void BSplSLib::CacheD0(const Standard_Real UParameter, + (min_degree << 1) + min_degree, + locpoles[0], + myPoint[0]) ; +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + dimension = min_degree + 1 ; + Standard_Real * + WArray = (Standard_Real *) +@@ -2190,7 +2190,7 @@ void BSplSLib::CacheD1(const Standard_Real UParameter, + // the coefficients + // + // +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + + local_poles_array [0][0][0] = 0.0e0 ; + local_poles_array [0][0][1] = 0.0e0 ; +@@ -2275,7 +2275,7 @@ void BSplSLib::CacheD1(const Standard_Real UParameter, + locpoles[dimension], + local_poles_array[1][0][0]) ; + +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + dimension = min_degree + 1 ; + Standard_Real * + WArray = (Standard_Real *) +@@ -2435,7 +2435,7 @@ void BSplSLib::CacheD2(const Standard_Real UParameter, + // the coefficients + // + // 
+- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + + local_poles_and_weights_array[0][0][0] = 0.0e0 ; + local_poles_and_weights_array[0][0][1] = 0.0e0 ; +@@ -2564,7 +2564,7 @@ void BSplSLib::CacheD2(const Standard_Real UParameter, + locpoles[dimension + dimension], + local_poles_array[2][0][0]) ; + +- if (&WeightsArray != NULL) { ++ if (!IS_NULL_REF(WeightsArray)) { + dimension = min_degree + 1 ; + Standard_Real * + WArray = (Standard_Real *) +diff --git a/src/BSplSLib/BSplSLib_BzSyntaxes.cxx b/src/BSplSLib/BSplSLib_BzSyntaxes.cxx +index 0faf6b6..f2c0f74 100644 +--- a/src/BSplSLib/BSplSLib_BzSyntaxes.cxx ++++ b/src/BSplSLib/BSplSLib_BzSyntaxes.cxx +@@ -68,7 +68,7 @@ void BSplSLib::PolesCoefficients (const TColgp_Array2OfPnt& Poles, + biduflatknots,bidvflatknots, + Poles,Weights, + CPoles,CWeights); +- if (&Weights == NULL) { ++ if (IS_NULL_REF(Weights)) { + + for (ii = 1; ii <= uclas; ii++) { + +diff --git a/src/PLib/PLib.cxx b/src/PLib/PLib.cxx +index 23fa302..7ee231f 100644 +--- a/src/PLib/PLib.cxx ++++ b/src/PLib/PLib.cxx +@@ -2427,7 +2427,7 @@ void PLib::CoefficientsPoles (const Standard_Integer dim, + TColStd_Array1OfReal& Poles, + TColStd_Array1OfReal& Weights) + { +- Standard_Boolean rat = &WCoefs != NULL; ++ Standard_Boolean rat = !IS_NULL_REF(WCoefs); + Standard_Integer loc = Coefs.Lower(); + Standard_Integer lop = Poles.Lower(); + Standard_Integer lowc=0; +@@ -2550,7 +2550,7 @@ void PLib::Trimming(const Standard_Real U1, + Standard_Integer indc, indw=0; + Standard_Integer upc = Coefs.Upper() - dim + 1, upw=0; + Standard_Integer len = Coefs.Length()/dim; +- Standard_Boolean rat = &WCoefs != NULL; ++ Standard_Boolean rat = !IS_NULL_REF(WCoefs); + + if (rat) { + if(len != WCoefs.Length()) +@@ -2607,7 +2607,7 @@ void PLib::CoefficientsPoles (const TColgp_Array2OfPnt& Coefs, + TColgp_Array2OfPnt& Poles, + TColStd_Array2OfReal& Weights) + { +- Standard_Boolean rat = (&WCoefs != NULL); ++ Standard_Boolean rat = (!IS_NULL_REF(WCoefs)); + Standard_Integer LowerRow = Poles.LowerRow(); + Standard_Integer UpperRow = Poles.UpperRow(); + Standard_Integer LowerCol = Poles.LowerCol(); +@@ -2701,7 +2701,7 @@ void PLib::UTrimming(const Standard_Real U1, + TColgp_Array2OfPnt& Coeffs, + TColStd_Array2OfReal& WCoeffs) + { +- Standard_Boolean rat = &WCoeffs != NULL; ++ Standard_Boolean rat = !IS_NULL_REF(WCoeffs); + Standard_Integer lr = Coeffs.LowerRow(); + Standard_Integer ur = Coeffs.UpperRow(); + Standard_Integer lc = Coeffs.LowerCol(); +@@ -2735,7 +2735,7 @@ void PLib::VTrimming(const Standard_Real V1, + TColgp_Array2OfPnt& Coeffs, + TColStd_Array2OfReal& WCoeffs) + { +- Standard_Boolean rat = &WCoeffs != NULL; ++ Standard_Boolean rat = !IS_NULL_REF(WCoeffs); + Standard_Integer lr = Coeffs.LowerRow(); + Standard_Integer ur = Coeffs.UpperRow(); + Standard_Integer lc = Coeffs.LowerCol(); \ No newline at end of file diff --git a/var/spack/repos/builtin/packages/oce/package.py b/var/spack/repos/builtin/packages/oce/package.py new file mode 100644 index 0000000000000000000000000000000000000000..3fe6638e66c227b800c7579eb3055df0cfab47d1 --- /dev/null +++ b/var/spack/repos/builtin/packages/oce/package.py @@ -0,0 +1,67 @@ +from spack import * +import platform, sys + +class Oce(Package): + """ + Open CASCADE Community Edition: + patches/improvements/experiments contributed by users over the official Open CASCADE library. 
+ """ + homepage = "https://github.com/tpaviot/oce" + url = "https://github.com/tpaviot/oce/archive/OCE-0.17.tar.gz" + + version('0.17.1', '36c67b87093c675698b483454258af91') + version('0.17' , 'f1a89395c4b0d199bea3db62b85f818d') + version('0.16.1', '4d591b240c9293e879f50d86a0cb2bb3') + version('0.16' , '7a4b4df5a104d75a537e25e7dd387eca') + + variant('tbb', default=True, description='Build with Intel Threading Building Blocks') + + depends_on('cmake@2.8:') + depends_on('tbb', when='+tbb') + + # There is a bug in OCE which appears with Clang (version?) or GCC 6.0 + # and has to do with compiler optimization, see + # https://github.com/tpaviot/oce/issues/576 + # http://tracker.dev.opencascade.org/view.php?id=26042 + # https://github.com/tpaviot/oce/issues/605 + # https://github.com/tpaviot/oce/commit/61cb965b9ffeca419005bc15e635e67589c421dd.patch + patch('null.patch',when='@0.16:0.17.1') + + + def install(self, spec, prefix): + options = [] + options.extend(std_cmake_args) + options.extend([ + '-DOCE_INSTALL_PREFIX=%s' % prefix, + '-DOCE_BUILD_SHARED_LIB:BOOL=ON', + '-DCMAKE_BUILD_TYPE:STRING=Release', + '-DOCE_DATAEXCHANGE:BOOL=ON', + '-DOCE_DISABLE_X11:BOOL=ON', + '-DOCE_DRAW:BOOL=OFF', + '-DOCE_MODEL:BOOL=ON', + '-DOCE_MULTITHREAD_LIBRARY:STRING=%s' % ('TBB' if '+tbb' in spec else 'NONE'), + '-DOCE_OCAF:BOOL=ON', + '-DOCE_USE_TCL_TEST_FRAMEWORK:BOOL=OFF', + '-DOCE_VISUALISATION:BOOL=OFF', + '-DOCE_WITH_FREEIMAGE:BOOL=OFF', + '-DOCE_WITH_GL2PS:BOOL=OFF', + '-DOCE_WITH_OPENCL:BOOL=OFF' + ]) + + if platform.system() == 'Darwin': + options.extend([ + '-DOCE_OSX_USE_COCOA:BOOL=ON', + ]) + + cmake('.', *options) + + make("install/strip") + + # OCE tests build is brocken at least on Darwin. + # Unit tests are linked against libTKernel.10.dylib isntead of /full/path/libTKernel.10.dylib + # see https://github.com/tpaviot/oce/issues/612 + # make("test") + + # The shared libraries are not installed correctly on Darwin; correct this + if (sys.platform == 'darwin'): + fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 781a1e2ec8fed967c1f0654b4b9cb23843bd7192..4522130ccc948ba3d91c683878aa35c7bff1a2fe 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -1,30 +1,78 @@ from spack import * import sys +import os class Openblas(Package): """OpenBLAS: An optimized BLAS library""" homepage = "http://www.openblas.net" url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz" + version('0.2.17', '664a12807f2a2a7cda4781e3ab2ae0e1') version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc') version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9') + variant('shared', default=True, description="Build shared libraries as well as static libs.") + variant('openmp', default=True, description="Enable OpenMP support.") + # virtual dependency provides('blas') provides('lapack') + def install(self, spec, prefix): - make('libs', 'netlib', 'shared', 'CC=cc', 'FC=f77') - make('install', "PREFIX='%s'" % prefix) + # Openblas is picky about compilers. Configure fails with + # FC=/abs/path/to/f77, whereas FC=f77 works fine. + # To circumvent this, provide basename only: + make_defs = ['CC=%s' % os.path.basename(spack_cc), + 'FC=%s' % os.path.basename(spack_f77)] + + make_targets = ['libs', 'netlib'] + + # Build shared if variant is set. 
+ if '+shared' in spec: + make_targets += ['shared'] + else: + make_defs += ['NO_SHARED=1'] + + # fix missing _dggsvd_ and _sggsvd_ + if spec.satisfies('@0.2.16'): + make_defs += ['BUILD_LAPACK_DEPRECATED=1'] + + # Add support for OpenMP + # Note: Make sure your compiler supports OpenMP + if '+openmp' in spec: + make_defs += ['USE_OPENMP=1'] + + make_args = make_defs + make_targets + make(*make_args) + + make("tests", *make_defs) + + # no quotes around prefix (spack doesn't use a shell) + make('install', "PREFIX=%s" % prefix, *make_defs) - lib_dsuffix = 'dylib' if sys.platform == 'darwin' else 'so' # Blas virtual package should provide blas.a and libblas.a with working_dir(prefix.lib): symlink('libopenblas.a', 'blas.a') symlink('libopenblas.a', 'libblas.a') - symlink('libopenblas.%s' % lib_dsuffix, 'libblas.%s' % lib_dsuffix) + if '+shared' in spec: + symlink('libopenblas.%s' % dso_suffix, 'libblas.%s' % dso_suffix) # Lapack virtual package should provide liblapack.a with working_dir(prefix.lib): symlink('libopenblas.a', 'liblapack.a') - symlink('libopenblas.%s' % lib_dsuffix, 'liblapack.%s' % lib_dsuffix) + if '+shared' in spec: + symlink('libopenblas.%s' % dso_suffix, 'liblapack.%s' % dso_suffix) + + + def setup_dependent_package(self, module, dspec): + # This is WIP for a prototype interface for virtual packages. + # We can update this as more builds start depending on BLAS/LAPACK. + libdir = find_library_path('libopenblas.a', self.prefix.lib64, self.prefix.lib) + + self.spec.blas_static_lib = join_path(libdir, 'libopenblas.a') + self.spec.lapack_static_lib = self.spec.blas_static_lib + + if '+shared' in self.spec: + self.spec.blas_shared_lib = join_path(libdir, 'libopenblas.%s' % dso_suffix) + self.spec.lapack_shared_lib = self.spec.blas_shared_lib diff --git a/var/spack/repos/builtin/packages/openjpeg/package.py b/var/spack/repos/builtin/packages/openjpeg/package.py new file mode 100644 index 0000000000000000000000000000000000000000..afec197d11242ad8076d872f8032c0e7376943c4 --- /dev/null +++ b/var/spack/repos/builtin/packages/openjpeg/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Openjpeg(Package): + """ + OpenJPEG is an open-source JPEG 2000 codec written in C language. + It has been developed in order to promote the use of JPEG 2000, a + still-image compression standard from the Joint Photographic + Experts Group (JPEG). + Since April 2015, it is officially recognized by ISO/IEC and + ITU-T as a JPEG 2000 Reference Software. 
+ """ + homepage = "https://github.com/uclouvain/openjpeg" + url = "https://github.com/uclouvain/openjpeg/archive/version.2.1.tar.gz" + + version('2.1' , '3e1c451c087f8462955426da38aa3b3d') + version('2.0.1', '105876ed43ff7dbb2f90b41b5a43cfa5') + version('2.0' , 'cdf266530fee8af87454f15feb619609') + version('1.5.2', '545f98923430369a6b046ef3632ef95c') + version('1.5.1', 'd774e4b5a0db5f0f171c4fc0aabfa14e') + + + def install(self, spec, prefix): + cmake('.', *std_cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/p4est/package.py b/var/spack/repos/builtin/packages/p4est/package.py new file mode 100644 index 0000000000000000000000000000000000000000..1e2969fe642c285c3280e90a3e8d54df82a83e79 --- /dev/null +++ b/var/spack/repos/builtin/packages/p4est/package.py @@ -0,0 +1,34 @@ +from spack import * + +class P4est(Package): + """Dynamic management of a collection (a forest) of adaptive octrees in parallel""" + homepage = "http://www.p4est.org" + url = "http://p4est.github.io/release/p4est-1.1.tar.gz" + + version('1.1', '37ba7f4410958cfb38a2140339dbf64f') + + # disable by default to make it work on frontend of clusters + variant('tests', default=False, description='Run small tests') + + depends_on('mpi') + + def install(self, spec, prefix): + options = ['--enable-mpi', + '--enable-shared', + '--disable-vtk-binary', + '--without-blas', + 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL', + 'CFLAGS=-O2', + 'CC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # TODO: use ENV variables or MPI class wrappers + 'CXX=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'), + 'FC=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'), + 'F77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'), + ] + + configure('--prefix=%s' % prefix, *options) + + make() + if '+tests' in self.spec: + make("check") + + make("install") diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py index 62a8f7ca0ba9ecf03b5ae7a0da652b22718739ad..e6f8cf026bf2cc41f031dcd6dc0451fa217b7f22 100644 --- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py +++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py @@ -8,6 +8,7 @@ class ParallelNetcdf(Package): homepage = "https://trac.mcs.anl.gov/projects/parallel-netcdf" url = "http://cucis.ece.northwestern.edu/projects/PnetCDF/Release/parallel-netcdf-1.6.1.tar.gz" + version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0') version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34') depends_on("m4") diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py index c16054816c18e463ad999047ab205ffad8cb92b1..60f8d3c243da4735c6d85070f2f2dbf9f44344bc 100644 --- a/var/spack/repos/builtin/packages/paraview/package.py +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -27,13 +27,14 @@ class Paraview(Package): depends_on('bzip2') depends_on('freetype') - depends_on('hdf5+mpi', when='+mpi') - depends_on('hdf5~mpi', when='~mpi') + #depends_on('hdf5+mpi', when='+mpi') + #depends_on('hdf5~mpi', when='~mpi') depends_on('jpeg') depends_on('libpng') depends_on('libtiff') depends_on('libxml2') - depends_on('netcdf') + #depends_on('netcdf') + #depends_on('netcdf-cxx') #depends_on('protobuf') # version mismatches? 
#depends_on('sqlite') # external version not supported depends_on('zlib') @@ -75,13 +76,13 @@ def nfeature_to_bool(feature): cmake('..', '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, '-DBUILD_TESTING:BOOL=OFF', - '-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON', - '-DVTK_USER_SYSTEM_HDF5:BOOL=ON', - '-DVTK_USER_SYSTEM_JPEG:BOOL=ON', - '-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON', - '-DVTK_USER_SYSTEM_NETCDF:BOOL=ON', - '-DVTK_USER_SYSTEM_TIFF:BOOL=ON', - '-DVTK_USER_SYSTEM_ZLIB:BOOL=ON', + '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON', + '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF', + '-DVTK_USE_SYSTEM_JPEG:BOOL=ON', + '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON', + '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF', + '-DVTK_USE_SYSTEM_TIFF:BOOL=ON', + '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON', *feature_args) make() make('install') diff --git a/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch b/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch index 514781b8b88881d494aaf47bbfd7c5e1384fba81..e4f2729483638d2a1929cd905338dc51493282f2 100644 --- a/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch +++ b/var/spack/repos/builtin/packages/parmetis/enable_external_metis.patch @@ -1,13 +1,71 @@ diff --git a/CMakeLists.txt b/CMakeLists.txt -index ca945dd..1bf94e9 100644 +index ca945dd..aff8b5f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt +@@ -23,7 +23,7 @@ else() + set(ParMETIS_LIBRARY_TYPE STATIC) + endif() + +-include(${GKLIB_PATH}/GKlibSystem.cmake) ++include_directories(${GKLIB_PATH}) + + # List of paths that the compiler will search for header files. + # i.e., the -I equivalent @@ -33,7 +33,7 @@ include_directories(${GKLIB_PATH}) include_directories(${METIS_PATH}/include) - + # List of directories that cmake will look for CMakeLists.txt -add_subdirectory(${METIS_PATH}/libmetis ${CMAKE_BINARY_DIR}/libmetis) -+#add_subdirectory(${METIS_PATH}/libmetis ${CMAKE_BINARY_DIR}/libmetis) ++find_library(METIS_LIBRARY metis PATHS ${METIS_PATH}/lib) add_subdirectory(include) add_subdirectory(libparmetis) add_subdirectory(programs) +diff --git a/libparmetis/CMakeLists.txt b/libparmetis/CMakeLists.txt +index 9cfc8a7..e0c4de7 100644 +--- a/libparmetis/CMakeLists.txt ++++ b/libparmetis/CMakeLists.txt +@@ -5,7 +5,10 @@ file(GLOB parmetis_sources *.c) + # Create libparmetis + add_library(parmetis ${ParMETIS_LIBRARY_TYPE} ${parmetis_sources}) + # Link with metis and MPI libraries. 
+-target_link_libraries(parmetis metis ${MPI_LIBRARIES}) ++target_link_libraries(parmetis ${METIS_LIBRARY} ${MPI_LIBRARIES}) ++if(UNIX) ++ target_link_libraries(parmetis m) ++endif() + set_target_properties(parmetis PROPERTIES LINK_FLAGS "${MPI_LINK_FLAGS}") + + install(TARGETS parmetis +diff --git a/libparmetis/parmetislib.h b/libparmetis/parmetislib.h +index c1daeeb..07511f6 100644 +--- a/libparmetis/parmetislib.h ++++ b/libparmetis/parmetislib.h +@@ -20,13 +20,12 @@ + + #include <parmetis.h> + +-#include "../metis/libmetis/gklib_defs.h" ++#include <gklib_defs.h> + +-#include <mpi.h> ++#include <mpi.h> + + #include <rename.h> + #include <defs.h> + #include <struct.h> + #include <macros.h> + #include <proto.h> +- +diff --git a/programs/parmetisbin.h b/programs/parmetisbin.h +index e26cd2d..d156480 100644 +--- a/programs/parmetisbin.h ++++ b/programs/parmetisbin.h +@@ -19,7 +19,7 @@ + #include <GKlib.h> + #include <parmetis.h> + +-#include "../metis/libmetis/gklib_defs.h" ++#include <gklib_defs.h> + #include "../libparmetis/rename.h" + #include "../libparmetis/defs.h" + #include "../libparmetis/struct.h" diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py index c691cf4191d1ad45f08f8f1be4ddb2831fe6df6f..b49f8dae00dbd1b4d6b49bc27a1204de9d56e0a3 100644 --- a/var/spack/repos/builtin/packages/parmetis/package.py +++ b/var/spack/repos/builtin/packages/parmetis/package.py @@ -24,7 +24,7 @@ ############################################################################## from spack import * - +import sys class Parmetis(Package): """ @@ -44,7 +44,7 @@ class Parmetis(Package): depends_on('mpi') patch('enable_external_metis.patch') - depends_on('metis') + depends_on('metis@5:') # bug fixes from PETSc developers # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/ @@ -64,7 +64,7 @@ def install(self, spec, prefix): # FIXME : Once a contract is defined, MPI compilers should be retrieved indirectly via spec['mpi'] in case # FIXME : they use a non-standard name - options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=metis_source), # still need headers from METIS source, and they are not installed with METIS. shame... 
+ options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=spec['metis'].prefix.include), '-DMETIS_PATH:PATH={metis_source}'.format(metis_source=spec['metis'].prefix), '-DCMAKE_C_COMPILER:STRING=mpicc', '-DCMAKE_CXX_COMPILER:STRING=mpicxx']) @@ -83,3 +83,7 @@ def install(self, spec, prefix): cmake(source_directory, *options) make() make("install") + + # The shared library is not installed correctly on Darwin; correct this + if (sys.platform == 'darwin') and ('+shared' in spec): + fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py index 7239baaf7f559bfaac59bb77ad88faca6cef377a..1161dd7d67a08bdeb2e493df724eae8c83fed300 100644 --- a/var/spack/repos/builtin/packages/petsc/package.py +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -17,14 +17,18 @@ class Petsc(Package): version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') version('3.4.4', '7edbc68aa6d8d6a3295dd5f6c2f6979d') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('mpi', default=True, description='Activates MPI support') - variant('double', default=True, description='Switches between single and double precision') + variant('shared', default=True, description='Enables the build of shared libraries') + variant('mpi', default=True, description='Activates MPI support') + variant('double', default=True, description='Switches between single and double precision') + variant('complex', default=False, description='Build with complex numbers') + variant('debug', default=False, description='Compile in debug mode') - variant('metis', default=True, description='Activates support for metis and parmetis') - variant('hdf5', default=True, description='Activates support for HDF5 (only parallel)') - variant('boost', default=True, description='Activates support for Boost') - variant('hypre', default=True, description='Activates support for Hypre') + variant('metis', default=True, description='Activates support for metis and parmetis') + variant('hdf5', default=True, description='Activates support for HDF5 (only parallel)') + variant('boost', default=True, description='Activates support for Boost') + variant('hypre', default=True, description='Activates support for Hypre (only parallel)') + variant('mumps', default=True, description='Activates support for MUMPS (only parallel)') + variant('superlu-dist', default=True, description='Activates support for SuperluDist (only parallel)') # Virtual dependencies depends_on('blas') @@ -36,11 +40,17 @@ class Petsc(Package): # Other dependencies depends_on('boost', when='+boost') - depends_on('metis', when='+metis') + depends_on('metis@5:', when='+metis') depends_on('hdf5+mpi', when='+hdf5+mpi') depends_on('parmetis', when='+metis+mpi') - depends_on('hypre', when='+hypre+mpi') + # Hypre does not support complex numbers. + # Also PETSc prefer to build it without internal superlu, likely due to conflict in headers + # see https://bitbucket.org/petsc/petsc/src/90564b43f6b05485163c147b464b5d6d28cde3ef/config/BuildSystem/config/packages/hypre.py + depends_on('hypre~internal-superlu', when='+hypre+mpi~complex') + depends_on('superlu-dist', when='+superlu-dist+mpi') + depends_on('mumps+mpi', when='+mumps+mpi') + depends_on('scalapack', when='+mumps+mpi') def mpi_dependent_options(self): if '~mpi' in self.spec: @@ -55,38 +65,30 @@ def mpi_dependent_options(self): # If mpi is disabled (~mpi), it's an error to have any of these enabled. 
# This generates a list of any such errors. errors = [error_message_fmt.format(library=x) - for x in ('hdf5', 'hypre', 'parmetis') + for x in ('hdf5', 'hypre', 'parmetis','mumps','superlu-dist') if ('+'+x) in self.spec] if errors: errors = ['incompatible variants given'] + errors raise RuntimeError('\n'.join(errors)) else: - if self.compiler.name == "clang": - compiler_opts = [ - '--with-mpi=1', - '--with-cc=%s -Qunused-arguments' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # Avoid confusing PETSc config by clang: warning: argument unused during compilation - '--with-cxx=%s -Qunused-arguments' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'), - '--with-fc=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'), - '--with-f77=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif77'), - ] - else: - compiler_opts = [ - '--with-mpi=1', - '--with-mpi-dir=%s' % self.spec['mpi'].prefix, - ] + compiler_opts = [ + '--with-mpi=1', + '--with-mpi-dir=%s' % self.spec['mpi'].prefix, + ] return compiler_opts def install(self, spec, prefix): - options = ['--with-debugging=0', - '--with-ssl=0'] + options = ['--with-ssl=0'] options.extend(self.mpi_dependent_options()) options.extend([ '--with-precision=%s' % ('double' if '+double' in spec else 'single'), + '--with-scalar-type=%s' % ('complex' if '+complex' in spec else 'real'), '--with-shared-libraries=%s' % ('1' if '+shared' in spec else '0'), + '--with-debugging=%s' % ('1' if '+debug' in spec else '0'), '--with-blas-lapack-dir=%s' % spec['lapack'].prefix ]) # Activates library support if needed - for library in ('metis', 'boost', 'hdf5', 'hypre', 'parmetis'): + for library in ('metis', 'boost', 'hdf5', 'hypre', 'parmetis','mumps','scalapack'): options.append( '--with-{library}={value}'.format(library=library, value=('1' if library in spec else '0')) ) @@ -94,9 +96,24 @@ def install(self, spec, prefix): options.append( '--with-{library}-dir={path}'.format(library=library, path=spec[library].prefix) ) + # PETSc does not pick up SuperluDist from the dir as they look for superlu_dist_4.1.a + if 'superlu-dist' in spec: + options.extend([ + '--with-superlu_dist-include=%s' % spec['superlu-dist'].prefix.include, + '--with-superlu_dist-lib=%s' % join_path(spec['superlu-dist'].prefix.lib, 'libsuperlu_dist.a'), + '--with-superlu_dist=1' + ]) + else: + options.append( + '--with-superlu_dist=0' + ) configure('--prefix=%s' % prefix, *options) # PETSc has its own way of doing parallel make. make('MAKE_NP=%s' % make_jobs, parallel=False) make("install") + + def setup_dependent_environment(self, spack_env, run_env, dependent_spec): + # set up PETSC_DIR for everyone using PETSc package + spack_env.set('PETSC_DIR', self.prefix) diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py index 9964c6ce3463cf48d095abbe4c66f9e037c25421..a803bc3f9b8189210eca1c54b6b4acfb69b186a9 100644 --- a/var/spack/repos/builtin/packages/pkg-config/package.py +++ b/var/spack/repos/builtin/packages/pkg-config/package.py @@ -10,7 +10,12 @@ class PkgConfig(Package): parallel = False def install(self, spec, prefix): - configure("--prefix=%s" %prefix, "--enable-shared") + configure("--prefix=%s" %prefix, + "--enable-shared", + "--with-internal-glib") # There's a bootstrapping problem here; + # glib uses pkg-config as well, so + # break the cycle by using the internal + # glib. 
make() make("install") diff --git a/var/spack/repos/builtin/packages/py-bottleneck/package.py b/var/spack/repos/builtin/packages/py-bottleneck/package.py new file mode 100644 index 0000000000000000000000000000000000000000..0aa4208b4de378a6fa4ffaa1a2409bece47ca20a --- /dev/null +++ b/var/spack/repos/builtin/packages/py-bottleneck/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyBottleneck(Package): + """Bottleneck is a collection of fast NumPy array functions written in Cython.""" + homepage = "https://pypi.python.org/pypi/Bottleneck/1.0.0" + url = "https://pypi.python.org/packages/source/B/Bottleneck/Bottleneck-1.0.0.tar.gz" + + version('1.0.0', '380fa6f275bd24f27e7cf0e0d752f5d2') + + extends('python', ignore=r'bin/f2py$') + depends_on('py-numpy') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py index 68eb735ad9307891f56966abbaf902e49ccaf1be..072355026e1e6c96088a3daf1d228d1f1d0af03e 100644 --- a/var/spack/repos/builtin/packages/py-cython/package.py +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -3,10 +3,14 @@ class PyCython(Package): """The Cython compiler for writing C extensions for the Python language.""" homepage = "https://pypi.python.org/pypi/cython" - url = "https://pypi.python.org/packages/source/C/Cython/cython-0.22.tar.gz" + url = "https://pypi.python.org/packages/source/C/Cython/Cython-0.22.tar.gz" - version('0.21.2', 'd21adb870c75680dc857cd05d41046a4') + version('0.23.5', '66b62989a67c55af016c916da36e7514') + version('0.23.4', '157df1f69bcec6b56fd97e0f2e057f6e') + + # These versions contain illegal Python3 code... version('0.22', '1ae25add4ef7b63ee9b4af697300d6b6') + version('0.21.2', 'd21adb870c75680dc857cd05d41046a4') extends('python') diff --git a/var/spack/repos/builtin/packages/py-dask/package.py b/var/spack/repos/builtin/packages/py-dask/package.py new file mode 100644 index 0000000000000000000000000000000000000000..cf0a16f21e355dfd5a7d0ef888b955d78fad89d2 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dask/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyDask(Package): + """Minimal task scheduling abstraction""" + homepage = "https://github.com/dask/dask/" + url = "https://pypi.python.org/packages/source/d/dask/dask-0.8.1.tar.gz" + + version('0.8.1', '5dd8e3a3823b3bc62c9a6d192e2cb5b4') + + extends('python') + + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-dateutil/package.py b/var/spack/repos/builtin/packages/py-dateutil/package.py index 0a17f2f2d249f4d0c16fad0dd6dc4e784d7cd21c..b67e91ace605ebf17adb6a8ac6e9684db49c741b 100644 --- a/var/spack/repos/builtin/packages/py-dateutil/package.py +++ b/var/spack/repos/builtin/packages/py-dateutil/package.py @@ -7,6 +7,7 @@ class PyDateutil(Package): version('2.4.0', '75714163bb96bedd07685cdb2071b8bc') version('2.4.2', '4ef68e1c485b09e9f034e10473e5add2') + version('2.5.2', 'eafe168e8f404bf384514f5116eedbb6') extends('python') depends_on('py-setuptools') diff --git a/var/spack/repos/builtin/packages/py-decorator/package.py b/var/spack/repos/builtin/packages/py-decorator/package.py new file mode 100644 index 0000000000000000000000000000000000000000..abbd9f43d14f9536e433d9d072c1ce1486f8272b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-decorator/package.py @@ -0,0 +1,15 @@ +from spack import 
* + +class PyDecorator(Package): + """The aim of the decorator module it to simplify the usage of decorators for the average programmer, and to popularize decorators by showing various non-trivial examples.""" + homepage = "https://github.com/micheles/decorator" + url = "https://pypi.python.org/packages/source/d/decorator/decorator-4.0.9.tar.gz" + + version('4.0.9', 'f12c5651ccd707e12a0abaa4f76cd69a') + + extends('python') + + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-libxml2/package.py b/var/spack/repos/builtin/packages/py-libxml2/package.py deleted file mode 100644 index 59005428e4d1c9bcc520b0da5e8a107dee5573d0..0000000000000000000000000000000000000000 --- a/var/spack/repos/builtin/packages/py-libxml2/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyLibxml2(Package): - """A Python wrapper around libxml2.""" - homepage = "https://xmlsoft.org/python.html" - url = "ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.21.tar.gz" - - version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3') - - extends('python') - depends_on('libxml2') - depends_on('libxslt') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index 2167735fb89b78f1282261c0ea1f57d82e68f9a1..19194c942e7fb6466fee162f06329e21031b03ec 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -12,7 +12,7 @@ class PyMatplotlib(Package): variant('gui', default=False, description='Enable GUI') variant('ipython', default=False, description='Enable ipython support') - extends('python', ignore=r'bin/nosetests.*$|bin/pbr$') + extends('python', ignore=r'bin/nosetests.*$|bin/pbr$|bin/f2py$') depends_on('py-pyside', when='+gui') depends_on('py-ipython', when='+ipython') @@ -26,6 +26,7 @@ class PyMatplotlib(Package): depends_on('py-pbr') depends_on('py-funcsigs') + depends_on('pkg-config') depends_on('freetype') depends_on('qt', when='+gui') depends_on('bzip2') diff --git a/var/spack/repos/builtin/packages/py-mpmath/package.py b/var/spack/repos/builtin/packages/py-mpmath/package.py new file mode 100644 index 0000000000000000000000000000000000000000..4d3261ae8f9a03fd63fc99b8f318817b017c13e0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mpmath/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyMpmath(Package): + """A Python library for arbitrary-precision floating-point arithmetic.""" + homepage = "http://mpmath.org" + url = "https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz" + + version('0.19', 'd1b7e19dd6830d0d7b5e1bc93d46c02c') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-netcdf/package.py b/var/spack/repos/builtin/packages/py-netcdf/package.py new file mode 100644 index 0000000000000000000000000000000000000000..7faa15ad253c886dfd1f7904a7effe4659eba47d --- /dev/null +++ b/var/spack/repos/builtin/packages/py-netcdf/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyNetcdf(Package): + """Python interface to the netCDF Library.""" + homepage = "http://unidata.github.io/netcdf4-python" + url = "https://github.com/Unidata/netcdf4-python/tarball/v1.2.3.1rel" + + version('1.2.3.1', 
'4fc4320d4f2a77b894ebf8da1c9895af') + + extends('python') + depends_on('py-numpy') + depends_on('py-cython') + depends_on('netcdf') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py new file mode 100644 index 0000000000000000000000000000000000000000..893146ec3ea86891275cb779dc74dae7ee195dba --- /dev/null +++ b/var/spack/repos/builtin/packages/py-networkx/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyNetworkx(Package): + """NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks.""" + homepage = "http://networkx.github.io/" + url = "https://pypi.python.org/packages/source/n/networkx/networkx-1.11.tar.gz" + + version('1.11', '6ef584a879e9163013e9a762e1cf7cd1') + + extends('python') + + depends_on('py-decorator') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py index 4fee99098ed957a1cf1352a830b4c06d1f7d2421..c2c2b52e03e86bcf35e516f47012ea3feac28803 100644 --- a/var/spack/repos/builtin/packages/py-nose/package.py +++ b/var/spack/repos/builtin/packages/py-nose/package.py @@ -10,6 +10,7 @@ class PyNose(Package): version('1.3.4', '6ed7169887580ddc9a8e16048d38274d') version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16') + version('1.3.7', '4d3ad0ff07b61373d2cefc89c5d0b20b') extends('python', ignore=r'bin/nosetests.*$') depends_on('py-setuptools') diff --git a/var/spack/repos/builtin/packages/py-numexpr/package.py b/var/spack/repos/builtin/packages/py-numexpr/package.py index 89f8a525b12f5abe27d24c69b5ba2ec01ab1096d..081a79dec685a115ff2d419eb9c34145bc3d791f 100644 --- a/var/spack/repos/builtin/packages/py-numexpr/package.py +++ b/var/spack/repos/builtin/packages/py-numexpr/package.py @@ -7,8 +7,9 @@ class PyNumexpr(Package): url = "https://pypi.python.org/packages/source/n/numexpr/numexpr-2.4.6.tar.gz" version('2.4.6', '17ac6fafc9ea1ce3eb970b9abccb4fbd') + version('2.5', '84f66cced45ba3e30dcf77a937763aaa') - extends('python') + extends('python', ignore=r'bin/f2py$') depends_on('py-numpy') def install(self, spec, prefix): diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 03548111863f49e3a0e38556abf0f03a6aeeecae..40988fb44a75e0046fdd7828b801ca91f63ad2ac 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -1,24 +1,44 @@ from spack import * class PyNumpy(Package): - """array processing for numbers, strings, records, and objects.""" - homepage = "https://pypi.python.org/pypi/numpy" + """NumPy is the fundamental package for scientific computing with Python. 
+ It contains among other things: a powerful N-dimensional array object, + sophisticated (broadcasting) functions, tools for integrating C/C++ and + Fortran code, and useful linear algebra, Fourier transform, and random + number capabilities""" + homepage = "http://www.numpy.org/" url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz" - version('1.9.1', '78842b73560ec378142665e712ae4ad9') - version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645') + version('1.11.0', 'bc56fb9fc2895aa4961802ffbdb31d0b') + version('1.10.4', 'aed294de0aa1ac7bd3f9745f4f1968ad') + version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645') + version('1.9.1', '78842b73560ec378142665e712ae4ad9') - variant('blas', default=True) + + variant('blas', default=True) + variant('lapack', default=True) extends('python') depends_on('py-nose') - depends_on('netlib-blas+fpic', when='+blas') - depends_on('netlib-lapack+shared', when='+blas') + depends_on('blas', when='+blas') + depends_on('lapack', when='+lapack') def install(self, spec, prefix): + libraries = [] + library_dirs = [] + if '+blas' in spec: + libraries.append('blas') + library_dirs.append(spec['blas'].prefix.lib) + if '+lapack' in spec: + libraries.append('lapack') + library_dirs.append(spec['lapack'].prefix.lib) + + if '+blas' in spec or '+lapack' in spec: with open('site.cfg', 'w') as f: f.write('[DEFAULT]\n') - f.write('libraries=lapack,blas\n') - f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix)) + f.write('libraries=%s\n' % ','.join(libraries)) + f.write('library_dirs=%s\n' % ':'.join(library_dirs)) + python('setup.py', 'install', '--prefix=%s' % prefix) + diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 5b9997faa94f0b2d4b4937ed8154a12618a19221..2320b1f92f386383f42e965e0469c23d8f27fbd8 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -8,18 +8,15 @@ class PyPandas(Package): version('0.16.0', 'bfe311f05dc0c351f8955fbd1e296e73') version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8') + version('0.18.0', 'f143762cd7a59815e348adf4308d2cf6') - extends('python') + extends('python', ignore=r'bin/f2py$') depends_on('py-dateutil') depends_on('py-numpy') - depends_on('py-matplotlib') - depends_on('py-scipy') depends_on('py-setuptools') depends_on('py-pytz') - depends_on('libdrm') - depends_on('libpciaccess') - depends_on('llvm') - depends_on('mesa') + depends_on('py-numexpr') + depends_on('py-bottleneck') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index adc8507bd579059385de361ce26e7cd1537e47c4..66d9bb43824d406165e729265351dd96c9a6fbee 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -1,4 +1,5 @@ from spack import * +import os class PyPillow(Package): """Pillow is the friendly PIL fork by Alex Clark and Contributors. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. 
This library supports many file formats, and provides powerful image processing and graphics capabilities.""" @@ -7,8 +8,68 @@ class PyPillow(Package): url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-3.0.0.tar.gz" version('3.0.0', 'fc8ac44e93da09678eac7e30c9b7377d') + provides('PIL') + + # These defaults correspond to Pillow defaults + variant('jpeg', default=True, description='Provide JPEG functionality') + variant('zlib', default=True, description='Access to compressed PNGs') + variant('tiff', default=False, description='Access to TIFF files') + variant('freetype', default=False, description='Font related services') + variant('tk', default=False, description='Support for tkinter bitmap and photo images') + variant('lcms', default=False, description='Color management') + + # Spack does not (yet) support these modes of building + # variant('webp', default=False, description='') + # variant('webpmux', default=False, description='') + # variant('jpeg2000', default=False, description='') + extends('python') + depends_on('binutils') depends_on('py-setuptools') + depends_on('jpeg', when='+jpeg') # BUG: It will use the system libjpeg anyway + depends_on('zlib', when='+zlib') + depends_on('tiff', when='+tiff') + depends_on('freetype', when='+freetype') + depends_on('lcms', when='+lcms') + depends_on('tcl', when='+tk') + depends_on('tk', when='+tk') + def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + libpath=[] + + if '+jpeg' in spec: + libpath.append(join_path(spec['jpeg'].prefix, 'lib')) + if '+zlib' in spec: + libpath.append(join_path(spec['zlib'].prefix, 'lib')) + if '+tiff' in spec: + libpath.append(join_path(spec['tiff'].prefix, 'lib')) + if '+freetype' in spec: + libpath.append(join_path(spec['freetype'].prefix, 'lib')) + if '+lcms' in spec: + libpath.append(join_path(spec['lcms'].prefix, 'lib')) + + # This has not been tested, and likely needs some other treatment. 
+ #if '+tk' in spec: + # libpath.append(join_path(spec['tcl'].prefix, 'lib')) + # libpath.append(join_path(spec['tk'].prefix, 'lib')) + + # -------- Building + cmd = ['build_ext', + '--%s-jpeg' % ('enable' if '+jpeg' in spec else 'disable'), + '--%s-zlib' % ('enable' if '+zlib' in spec else 'disable'), + '--%s-tiff' % ('enable' if '+tiff' in spec else 'disable'), + '--%s-freetype' % ('enable' if '+freetype' in spec else 'disable'), + '--%s-lcms' % ('enable' if '+lcms' in spec else 'disable'), + '-L'+':'.join(libpath) # NOTE: This does not make it find libjpeg + ] + + #if '+tk' in spec: + # cmd.extend(['--enable-tcl', '--enable-tk']) + #else: + # cmd.extend(['--disable-tcl', '--disable-tk']) + + # --------- Installation + cmd.extend(['install', '--prefix=%s' % prefix]) + + python('setup.py', *cmd) diff --git a/var/spack/repos/builtin/packages/py-pytz/package.py b/var/spack/repos/builtin/packages/py-pytz/package.py index da6311a784af332ffc7c67fc325b823ea258665b..060cf0cde416f4afb23c1c51ee22ca33e984df27 100644 --- a/var/spack/repos/builtin/packages/py-pytz/package.py +++ b/var/spack/repos/builtin/packages/py-pytz/package.py @@ -7,6 +7,7 @@ class PyPytz(Package): version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7') version('2015.4', '417a47b1c432d90333e42084a605d3d8') + version('2016.3', 'abae92c3301b27bd8a9f56b14f52cb29') extends('python') diff --git a/var/spack/repos/builtin/packages/py-scikit-image/package.py b/var/spack/repos/builtin/packages/py-scikit-image/package.py new file mode 100644 index 0000000000000000000000000000000000000000..22ce1f837407b52bdad258af0b9ee61991251ca3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-scikit-image/package.py @@ -0,0 +1,20 @@ +from spack import * + +class PyScikitImage(Package): + """Image processing algorithms for SciPy, including IO, morphology, filtering, warping, color manipulation, object detection, etc.""" + homepage = "http://scikit-image.org/" + url = "https://pypi.python.org/packages/source/s/scikit-image/scikit-image-0.12.3.tar.gz" + + version('0.12.3', '04ea833383e0b6ad5f65da21292c25e1') + + extends('python', ignore=r'bin/.*\.py$|bin/f2py$') + + depends_on('py-dask') + depends_on('py-pillow') + depends_on('py-networkx') + depends_on('py-six') + depends_on('py-scipy') + depends_on('py-matplotlib') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 5b078ce9011ae4788abd1b6103f48ae63a3bcb91..2d7985b98c3bf67b7faeb41dedab0f27044cf626 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -7,8 +7,13 @@ class PyScikitLearn(Package): version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d') version('0.16.1', '363ddda501e3b6b61726aa40b8dbdb7e') + version('0.17.1', 'a2f8b877e6d99b1ed737144f5a478dfc') extends('python') + depends_on('py-setuptools') + depends_on('py-numpy') + depends_on('py-scipy') + def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index 3a1124cc15a95c8cdebf3b1e44c299ce2baee5f5..4d47c641eec94da389b5851fef666e180c33ec7c 100644 --- a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -2,17 +2,24 @@ class PyScipy(Package): """Scientific Library for 
Python.""" - homepage = "https://pypi.python.org/pypi/scipy" + homepage = "http://www.scipy.org/" url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz" - version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a') + version('0.17.0', '5ff2971e1ce90e762c59d2cd84837224') version('0.15.1', 'be56cd8e60591d6332aac792a5880110') + version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a') extends('python') depends_on('py-nose') - depends_on('py-numpy') - depends_on('blas') - depends_on('lapack') + depends_on('py-numpy+blas+lapack') def install(self, spec, prefix): + if 'atlas' in spec: + # libatlas.so actually isn't always installed, but this + # seems to make the build autodetect things correctly. + env['ATLAS'] = join_path(spec['atlas'].prefix.lib, 'libatlas.' + dso_suffix) + else: + env['BLAS'] = spec['blas'].blas_shared_lib + env['LAPACK'] = spec['lapack'].lapack_shared_lib + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py index 26c048bfd423e42bfd33194cf4c95a01c1c2b66b..1368711978d7b0d61d95044f2fa79bcfda69d9fe 100644 --- a/var/spack/repos/builtin/packages/py-setuptools/package.py +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -5,10 +5,13 @@ class PySetuptools(Package): homepage = "https://pypi.python.org/pypi/setuptools" url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz" - version('11.3.1', '01f69212e019a2420c1693fb43593930') - version('16.0', '0ace0b96233516fc5f7c857d086aa3ad') - version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06') + version('20.7.0', '5d12b39bf3e75e80fdce54e44b255615') + version('20.6.7', '45d6110f3ec14924e44c33411db64fe6') + version('20.5', 'fadc1e1123ddbe31006e5e43e927362b') version('19.2', '78353b1f80375ca5e088f4b4627ffe03') + version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06') + version('16.0', '0ace0b96233516fc5f7c857d086aa3ad') + version('11.3.1', '01f69212e019a2420c1693fb43593930') extends('python') diff --git a/var/spack/repos/builtin/packages/py-six/package.py b/var/spack/repos/builtin/packages/py-six/package.py index 05c5bd00a9831a5c9dc6b80941b57b803e8b12f4..df277100f77fae4fb9e245846a419c13411073de 100644 --- a/var/spack/repos/builtin/packages/py-six/package.py +++ b/var/spack/repos/builtin/packages/py-six/package.py @@ -6,6 +6,7 @@ class PySix(Package): url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz" version('1.9.0', '476881ef4012262dfc8adc645ee786c4') + version('1.10.0', '34eed507548117b2ab523ab14b2f8b55') extends('python') depends_on('py-setuptools') diff --git a/var/spack/repos/builtin/packages/py-sympy/package.py b/var/spack/repos/builtin/packages/py-sympy/package.py index c17e35b95f159d7cf505e53b5df10fcacfa0943a..bbce8c74e3c5e9d7b85200892193ef7cef399de8 100644 --- a/var/spack/repos/builtin/packages/py-sympy/package.py +++ b/var/spack/repos/builtin/packages/py-sympy/package.py @@ -6,8 +6,10 @@ class PySympy(Package): url = "https://pypi.python.org/packages/source/s/sympy/sympy-0.7.6.tar.gz" version('0.7.6', '3d04753974306d8a13830008e17babca') + version('1.0', '43e797de799f00f9e8fd2307dba9fab1') extends('python') + depends_on('py-mpmath', when='@1.0:') def install(self, spec, prefix): python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py new file mode 100644 index 
0000000000000000000000000000000000000000..984b4196b1989c8f3853cf9b0f35260c26ec4954 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-tuiview/package.py @@ -0,0 +1,19 @@ +from spack import * + +class PyTuiview(Package): + """ + TuiView is a lightweight raster GIS with powerful raster attribute + table manipulation abilities. + """ + homepage = "https://bitbucket.org/chchrsc/tuiview" + url = "https://bitbucket.org/chchrsc/tuiview/get/tuiview-1.1.7.tar.gz" + + version('1.1.7', '4b3b38a820cc239c8ab4a181ac5d4c30') + + extends("python") + depends_on("py-pyqt") + depends_on("py-numpy") + depends_on("gdal") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py index 037a6fc59fe08579ce0c76b6f99eb3538baff6a5..09303d37b4315ca883682883943d3e0c551f52b8 100644 --- a/var/spack/repos/builtin/packages/py-virtualenv/package.py +++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py @@ -8,6 +8,7 @@ class PyVirtualenv(Package): version('1.11.6', 'f61cdd983d2c4e6aeabb70b1060d6f49') version('13.0.1', '1ffc011bde6667f0e37ecd976f4934db') + version('15.0.1', '28d76a0d9cbd5dc42046dd14e76a6ecc') extends('python') depends_on('py-setuptools') diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 4f55bc803ea7caae120af983f3cf12b92340d4e8..f5237c3b578b9c8900d45d8515428a1e5fe5b344 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -105,10 +105,13 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec): pythonpath = ':'.join(python_paths) spack_env.set('PYTHONPATH', pythonpath) - run_env.set('PYTHONPATH', pythonpath) + # For run time environment set only the path for extension_spec and prepend it to PYTHONPATH + if extension_spec.package.extends(self.spec): + run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir)) - def modify_module(self, module, spec, ext_spec): + + def setup_dependent_package(self, module, ext_spec): """ Called before python modules' install() methods. @@ -118,17 +121,18 @@ def modify_module(self, module, spec, ext_spec): """ # Python extension builds can have a global python executable function if self.version >= Version("3.0.0") and self.version < Version("4.0.0"): - module.python = Executable(join_path(spec.prefix.bin, 'python3')) + module.python = Executable(join_path(self.spec.prefix.bin, 'python3')) else: - module.python = Executable(join_path(spec.prefix.bin, 'python')) + module.python = Executable(join_path(self.spec.prefix.bin, 'python')) # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir) module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) - # Make the site packages directory if it does not exist already. - mkdirp(module.site_packages_dir) + # Make the site packages directory for extensions, if it does not exist already. + if ext_spec.package.is_extension: + mkdirp(module.site_packages_dir) # ======================================================================== # Handle specifics of activating and deactivating python modules. 
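The PYTHONPATH change above is the general pattern for packages that adjust their dependents' environment: at build time the full search path is set outright, while at run time only the extension's own site-packages directory is prepended, so the generated module files compose instead of clobbering one another when several extensions are loaded. A hedged sketch of the same split for a hypothetical package and variables:

.. code-block:: python

    def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
        # Build time: an absolute value is fine, the dependent sees only this build.
        spack_env.set('EXAMPLE_HOME', self.prefix)              # hypothetical variable
        # Run time: prepend, so module files for several dependents stack cleanly.
        run_env.prepend_path('EXAMPLE_PLUGIN_PATH',             # hypothetical variable
                             join_path(dependent_spec.prefix, 'plugins'))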
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index d08e8e81e1a29255040921b2b9fce3dcb3c8587a..93688fb777f5e5ac6f07645d045fed123aef5f00 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -1,46 +1,36 @@ -import os from spack import * import os class Qt(Package): """Qt is a comprehensive cross-platform C++ application framework.""" - homepage = "http://qt.io" - list_url = 'http://download.qt-project.org/official_releases/qt/' - list_depth = 2 - - version('5.4.2', 'fa1c4d819b401b267eb246a543a63ea5', - url='http://download.qt-project.org/official_releases/qt/5.4/5.4.2/single/qt-everywhere-opensource-src-5.4.2.tar.gz') - - version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6', - url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz') - - version('5.3.2', 'febb001129927a70174467ecb508a682', - url='http://download.qt.io/archive/qt/5.3/5.3.2/single/qt-everywhere-opensource-src-5.3.2.tar.gz') - - version('5.2.1', 'a78408c887c04c34ce615da690e0b4c8', - url='http://download.qt.io/archive/qt/5.2/5.2.1/single/qt-everywhere-opensource-src-5.2.1.tar.gz') - - version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', - url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") + homepage = 'http://qt.io' - version('3.3.8b', '9f05b4125cfe477cc52c9742c3c09009', - url="http://download.qt.io/archive/qt/3/qt-x11-free-3.3.8b.tar.gz") + version('5.5.1', '59f0216819152b77536cf660b015d784') + version('5.4.2', 'fa1c4d819b401b267eb246a543a63ea5') + version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6') + version('5.3.2', 'febb001129927a70174467ecb508a682') + version('5.2.1', 'a78408c887c04c34ce615da690e0b4c8') + version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb') + version('3.3.8b', '9f05b4125cfe477cc52c9742c3c09009') - variant('mesa', default=False, description='depend on mesa') # Add patch for compile issues with qt3 found with use in the OpenSpeedShop project - variant('krellpatch', default=False, description="build with openspeedshop based patch.") + variant('krellpatch', default=False, description="Build with openspeedshop based patch.") + variant('mesa', default=False, description="Depend on mesa.") + variant('gtk', default=False, description="Build with gtkplus.") + patch('qt3krell.patch', when='@3.3.8b+krellpatch') # Use system openssl for security. 
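The openssl note above refers to the commented-out dependency that follows it below. Separately, the ``patch(..., when=...)`` directive added here applies a patch file shipped alongside the package only when the concrete spec matches the constraint; the directive also accepts a ``level`` argument for non-default ``-p`` strip levels. The first line below is taken from the recipe, the rest are hypothetical further uses of the same directive:

.. code-block:: python

    # shipped with the qt package, applied only for this exact version + variant
    patch('qt3krell.patch', when='@3.3.8b+krellpatch')

    # hypothetical additional uses of the same directive
    patch('fix-build.patch', when='@4.8.6')          # scope a local fix to one release
    patch('relocate.patch', level=0, when='+gtk')    # patch created with -p0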
#depends_on("openssl") depends_on("glib") - depends_on("gtkplus") + depends_on("gtkplus", when='+gtk') depends_on("libxml2") depends_on("zlib") depends_on("dbus", when='@4:') depends_on("libtiff") - depends_on("libpng") + depends_on("libpng@1.2.56", when='@3') + depends_on("libpng", when='@4:') depends_on("libmng") depends_on("jpeg") @@ -56,6 +46,34 @@ class Qt(Package): depends_on("libxcb") + def url_for_version(self, version): + url = "http://download.qt.io/archive/qt/" + + if version >= Version('5'): + url += "%s/%s/single/qt-everywhere-opensource-src-%s.tar.gz" % \ + (version.up_to(2), version, version) + elif version >= Version('4.8'): + url += "%s/%s/qt-everywhere-opensource-src-%s.tar.gz" % \ + (version.up_to(2), version, version) + elif version >= Version('4.6'): + url += "%s/qt-everywhere-opensource-src-%s.tar.gz" % \ + (version.up_to(2), version) + elif version >= Version('4.0'): + url += "%s/qt-x11-opensource-src-%s.tar.gz" % \ + (version.up_to(2), version) + elif version >= Version('3'): + url += "%s/qt-x11-free-%s.tar.gz" % \ + (version.up_to(1), version) + elif version >= Version('2.1'): + url += "%s/qt-x11-%s.tar.gz" % \ + (version.up_to(1), version) + else: + url += "%s/qt-%s.tar.gz" % \ + (version.up_to(1), version) + + return url + + def setup_environment(self, spack_env, env): env.set('QTDIR', self.prefix) @@ -88,7 +106,7 @@ def common_config_args(self): '-v', '-opensource', '-opengl', - "-release", + '-release', '-shared', '-confirm-license', '-openssl-linked', @@ -97,12 +115,14 @@ def common_config_args(self): '-no-openvg', '-no-pch', # NIS is deprecated in more recent glibc - "-no-nis"] + '-no-nis'] # Don't disable all the database drivers, but should # really get them into spack at some point. @when('@3') def configure(self): + # An user report that this was necessary to link Qt3 on ubuntu + os.environ['LD_LIBRARY_PATH'] = os.getcwd()+'/lib' configure('-prefix', self.prefix, '-v', '-thread', diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py index 1b870e0e7ffafb18c3d3144ebc65506c399ab8cc..0c429ea756807d6a2988d8a2536760035320a624 100644 --- a/var/spack/repos/builtin/packages/readline/package.py +++ b/var/spack/repos/builtin/packages/readline/package.py @@ -2,12 +2,12 @@ class Readline(Package): """The GNU Readline library provides a set of functions for use by - applications that allow users to edit command li nes as they + applications that allow users to edit command lines as they are typed in. Both Emacs and vi editing modes are available. The Readline library includes additional functions to maintain a list of previously-entered command lines, to recall and perhaps reedit those lines, and perform csh-like - history expansion on previous commands. 
""" + history expansion on previous commands.""" homepage = "http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html" url = "ftp://ftp.cwru.edu/pub/bash/readline-6.3.tar.gz" diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py index 7ff1898ce9131cbacaa09b29267fac2ed550e76a..e13677e4d22d00ae1097caa4f5fbe7be186d00b8 100644 --- a/var/spack/repos/builtin/packages/ruby/package.py +++ b/var/spack/repos/builtin/packages/ruby/package.py @@ -30,7 +30,7 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec): # The actual installation path for this gem spack_env.set('GEM_HOME', extension_spec.prefix) - def modify_module(self, module, spec, ext_spec): + def setup_dependent_package(self, module, ext_spec): """Called before ruby modules' install() methods. Sets GEM_HOME and GEM_PATH to values appropriate for the package being built. @@ -39,5 +39,5 @@ def modify_module(self, module, spec, ext_spec): gem('install', '<gem-name>.gem') """ # Ruby extension builds have global ruby and gem functions - module.ruby = Executable(join_path(spec.prefix.bin, 'ruby')) - module.gem = Executable(join_path(spec.prefix.bin, 'gem')) + module.ruby = Executable(join_path(self.spec.prefix.bin, 'ruby')) + module.gem = Executable(join_path(self.spec.prefix.bin, 'gem')) diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py index d1aed78e0e89f3badd987df51a1d5517b0148984..b7894e4d2bd43097538ed6156d2ce4a77b9d9551 100644 --- a/var/spack/repos/builtin/packages/silo/package.py +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -5,24 +5,35 @@ class Silo(Package): data to binary, disk files.""" homepage = "http://wci.llnl.gov/simulation/computer-codes/silo" - url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.8/silo-4.8.tar.gz" + base_url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo" + version('4.10.2', '9ceac777a2f2469ac8cef40f4fab49c8') + version('4.9', 'a83eda4f06761a86726e918fc55e782a') version('4.8', 'b1cbc0e7ec435eb656dc4b53a23663c9') variant('fortran', default=True, description='Enable Fortran support') + variant('silex', default=False, description='Builds Silex, a GUI for viewing Silo files') - depends_on("hdf5") + depends_on('hdf5') + depends_on('qt', when='+silex') def install(self, spec, prefix): config_args = [ '--enable-fortran' if '+fortran' in spec else '--disable-fortran', + '--enable-silex' if '+silex' in spec else '--disable-silex', ] + if '+silex' in spec: + config_args.append('--with-Qt-dir=%s' % spec['qt'].prefix) + configure( - "--prefix=%s" % prefix, - "--with-hdf5=%s,%s" % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib), - "--with-zlib=%s,%s" % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib), + '--prefix=%s' % prefix, + '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib), + '--with-zlib=%s,%s' % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib), *config_args) make() - make("install") + make('install') + + def url_for_version(self, version): + return '%s/silo-%s/silo-%s.tar.gz' % (Silo.base_url, version, version) diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py new file mode 100644 index 0000000000000000000000000000000000000000..8b5f24394f6ecf537d3291c61d974191705c29eb --- /dev/null +++ b/var/spack/repos/builtin/packages/slepc/package.py @@ -0,0 +1,49 @@ +import os +from spack import * + + +class 
Slepc(Package): + """ + Scalable Library for Eigenvalue Computations. + """ + + homepage = "http://www.grycap.upv.es/slepc" + url = "http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz" + + version('3.6.2', '2ab4311bed26ccf7771818665991b2ea3a9b15f97e29fd13911ab1293e8e65df') + + variant('arpack', default=False, description='Enables Arpack wrappers') + + depends_on('petsc') + depends_on('arpack-ng~mpi',when='+arpack^petsc~mpi') + depends_on('arpack-ng+mpi',when='+arpack^petsc+mpi') + + def install(self, spec, prefix): + # set SLEPC_DIR for installation + os.environ['SLEPC_DIR'] = self.stage.source_path + + options = [] + + if '+arpack' in spec: + options.extend([ + '--with-arpack-dir=%s' % spec['arpack-ng'].prefix.lib, + ]) + if 'arpack-ng~mpi' in spec: + options.extend([ + '--with-arpack-flags=-larpack' + ]) + else: + options.extend([ + '--with-arpack-flags=-lparpack,-larpack' + ]) + + configure('--prefix=%s' % prefix, *options) + + make('MAKE_NP=%s' % make_jobs, parallel=False) + #FIXME: + # make('test') + make('install') + + def setup_dependent_environment(self, spack_env, run_env, dependent_spec): + # set up SLEPC_DIR for everyone using SLEPc package + spack_env.set('SLEPC_DIR', self.prefix) diff --git a/var/spack/repos/builtin/packages/subversion/package.py b/var/spack/repos/builtin/packages/subversion/package.py new file mode 100644 index 0000000000000000000000000000000000000000..5db1c3eb92dc9e291d8b5e79ab2f831195f0dcd1 --- /dev/null +++ b/var/spack/repos/builtin/packages/subversion/package.py @@ -0,0 +1,77 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +#import os + +class Subversion(Package): + """Apache Subversion - an open source version control system.""" + homepage = 'https://subversion.apache.org/' + url = 'http://archive.apache.org/dist/subversion/subversion-1.8.13.tar.gz' + + version('1.8.13', '8065b3698d799507fb72dd7926ed32b6') + version('1.9.3', 'a92bcfaec4e5038f82c74a7b5bbd2f46') + + depends_on('apr') + depends_on('apr-util') + depends_on('zlib') + depends_on('sqlite') + + # Optional: We need swig if we want the Perl, Python or Ruby + # bindings. 
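The commented-out binding dependencies mentioned above follow below. One detail worth calling out from the SLEPc recipe earlier in this hunk: a ``when=`` constraint can reach through the dependency graph with ``^``, which is how the ARPACK flavour is matched to the way PETSc itself was built. The same pattern with hypothetical package names:

.. code-block:: python

    variant('solver', default=False, description='Enable the optional extra solver')

    depends_on('corelib')                                    # hypothetical packages
    depends_on('solverlib~mpi', when='+solver^corelib~mpi')  # serial build of corelib
    depends_on('solverlib+mpi', when='+solver^corelib+mpi')  # MPI build of corelib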
+ #depends_on('swig') + #depends_on('python') + #depends_on('perl') + #depends_on('ruby') + + def install(self, spec, prefix): + + # configure, build, install: + # Ref: http://www.linuxfromscratch.org/blfs/view/svn/general/subversion.html + options = ['--prefix=%s' % prefix] + options.append('--with-apr=%s' % spec['apr'].prefix) + options.append('--with-apr-util=%s' % spec['apr-util'].prefix) + options.append('--with-zlib=%s' % spec['zlib'].prefix) + options.append('--with-sqlite=%s' % spec['sqlite'].prefix) + #options.append('--with-swig=%s' % spec['swig'].prefix) + + configure(*options) + make() + make('install') + + # python bindings + #make('swig-py', + # 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn', + # 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn') + #make('install-swig-py', + # 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn', + # 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn') + + # perl bindings + #make('swig-pl') + #make('install-swig-pl') + + # ruby bindings + #make('swig-rb') + #make('isntall-swig-rb') diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py index b57f9967c30cf2ee51ecdeb7f691df7ae663abce..a4b3979a1573fd308829324d059be6fd5ea2ac89 100644 --- a/var/spack/repos/builtin/packages/suite-sparse/package.py +++ b/var/spack/repos/builtin/packages/suite-sparse/package.py @@ -10,10 +10,18 @@ class SuiteSparse(Package): version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319') + # FIXME: (see below) + # variant('tbb', default=True, description='Build with Intel TBB') + depends_on('blas') depends_on('lapack') depends_on('metis@5.1.0', when='@4.5.1') + # FIXME: + # in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng flags + # does not seem to be used, which leads to linking errors on Linux. + # Try re-enabling in future versions. + # depends_on('tbb', when='+tbb') def install(self, spec, prefix): # The build system of SuiteSparse is quite old-fashioned @@ -21,6 +29,35 @@ def install(self, spec, prefix): # with a lot of convoluted logic in it. 
# Any kind of customization will need to go through filtering of that file - # FIXME : this actually uses the current workaround + make_args = ['INSTALL=%s' % prefix] + + # inject Spack compiler wrappers + make_args.extend([ + 'AUTOCC=no', + 'CC=cc', + 'CXX=c++', + 'F77=f77', + ]) + + # use Spack's metis in CHOLMOD/Partition module, + # otherwise internal Metis will be compiled + make_args.extend([ + 'MY_METIS_LIB=-L%s -lmetis' % spec['metis'].prefix.lib, + 'MY_METIS_INC=%s' % spec['metis'].prefix.include, + ]) + + # Intel TBB in SuiteSparseQR + if '+tbb' in spec: + make_args.extend([ + 'SPQR_CONFIG=-DHAVE_TBB', + 'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib, + ]) + + # BLAS arguments require path to libraries # FIXME : (blas / lapack always provide libblas and liblapack as aliases) - make('install', 'INSTALL=%s' % prefix, 'BLAS=-lblas', 'LAPACK=-llapack') + make_args.extend([ + 'BLAS=-lblas', + 'LAPACK=-llapack' + ]) + + make('install', *make_args) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index c4c76909b359b9e64d4218fbf470ca2cd8671857..5cf5e129b41c80725b34b24a58c0efd82a670568 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -1,4 +1,5 @@ from spack import * +import glob class SuperluDist(Package): """A general purpose library for the direct solution of large, sparse, nonsymmetric systems of linear equations on high performance machines.""" @@ -14,7 +15,7 @@ class SuperluDist(Package): depends_on ('blas') depends_on ('lapack') depends_on ('parmetis') - depends_on ('metis') + depends_on ('metis@5:') def install(self, spec, prefix): makefile_inc = [] @@ -52,12 +53,13 @@ def install(self, spec, prefix): # system "make" # need to install by hand - headers_location = join_path(self.prefix.include,'superlu_dist') + headers_location = self.prefix.include mkdirp(headers_location) - # FIXME: fetch all headers in the folder automatically - for header in ['Cnames.h','cublas_utils.h','dcomplex.h','html_mainpage.h','machines.h','old_colamd.h','psymbfact.h','superlu_ddefs.h','superlu_defs.h','superlu_enum_consts.h','superlu_zdefs.h','supermatrix.h','util_dist.h']: - superludist_header = join_path(self.stage.source_path, 'SRC/',header) - install(superludist_header, headers_location) + mkdirp(prefix.lib) + + headers = glob.glob(join_path(self.stage.source_path, 'SRC','*.h')) + for h in headers: + install(h,headers_location) superludist_lib = join_path(self.stage.source_path, 'lib/libsuperlu_dist.a') install(superludist_lib,self.prefix.lib) diff --git a/var/spack/repos/builtin/packages/tbb/package.py b/var/spack/repos/builtin/packages/tbb/package.py new file mode 100644 index 0000000000000000000000000000000000000000..56ffe4c27c649b09336d5be8867b7ceaa5c4ef79 --- /dev/null +++ b/var/spack/repos/builtin/packages/tbb/package.py @@ -0,0 +1,79 @@ +from spack import * +import os +import glob + +class Tbb(Package): + """Widely used C++ template library for task parallelism. + Intel Threading Building Blocks (Intel TBB) lets you easily write parallel + C++ programs that take full advantage of multicore performance, that are + portable and composable, and that have future-proof scalability. 
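The TBB package description continues immediately below. Looking back at the SuiteSparse recipe in this hunk: its handwritten Makefile normally autodetects compilers, so the recipe passes ``AUTOCC=no`` together with ``CC=cc``, ``CXX=c++`` and ``F77=f77`` to force the build through Spack's compiler wrappers, and injects external libraries through Makefile variables in the same way. A hedged sketch of that shape for a generic Makefile-only project; ``PREFIX``, ``MY_DEP_LIB`` and ``MY_DEP_INC`` are hypothetical Makefile variables and ``mydep`` a hypothetical dependency:

.. code-block:: python

    def install(self, spec, prefix):
        # 'cc', 'c++' and 'f77' resolve to Spack's wrappers, which come first
        # on PATH during the build; the variable names are illustrative only.
        make_args = [
            'PREFIX=%s' % prefix,
            'CC=cc', 'CXX=c++', 'F77=f77',
            # hand the Makefile an external library instead of its bundled copy
            'MY_DEP_LIB=-L%s -lmydep' % spec['mydep'].prefix.lib,
            'MY_DEP_INC=%s' % spec['mydep'].prefix.include,
        ]
        make('install', *make_args)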
+ """ + homepage = "http://www.threadingbuildingblocks.org/" + + # Only version-specific URL's work for TBB + version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1', url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz') + + def coerce_to_spack(self,tbb_build_subdir): + for compiler in ["icc","gcc","clang"]: + fs = glob.glob(join_path(tbb_build_subdir,"*.%s.inc" % compiler )) + for f in fs: + lines = open(f).readlines() + of = open(f,"w") + for l in lines: + if l.strip().startswith("CPLUS ="): + of.write("# coerced to spack\n") + of.write("CPLUS = $(CXX)\n") + elif l.strip().startswith("CPLUS ="): + of.write("# coerced to spack\n") + of.write("CONLY = $(CC)\n") + else: + of.write(l); + + def install(self, spec, prefix): + # + # we need to follow TBB's compiler selection logic to get the proper build + link flags + # but we still need to use spack's compiler wrappers + # to accomplish this, we do two things: + # + # * Look at the spack spec to determine which compiler we should pass to tbb's Makefile + # + # * patch tbb's build system to use the compiler wrappers (CC, CXX) for + # icc, gcc, clang + # (see coerce_to_spack()) + # + self.coerce_to_spack("build") + + if spec.satisfies('%clang'): + tbb_compiler = "clang" + elif spec.satisfies('%intel'): + tbb_compiler = "icc" + else: + tbb_compiler = "gcc" + + + mkdirp(prefix) + mkdirp(prefix.lib) + + # + # tbb does not have a configure script or make install target + # we simply call make, and try to put the pieces together + # + make("compiler=%s" %(tbb_compiler)) + + # install headers to {prefix}/include + install_tree('include',prefix.include) + + # install libs to {prefix}/lib + tbb_lib_names = ["libtbb", + "libtbbmalloc", + "libtbbmalloc_proxy"] + + for lib_name in tbb_lib_names: + # install release libs + fs = glob.glob(join_path("build","*release",lib_name + ".*")) + for f in fs: + install(f, prefix.lib) + # install debug libs if they exist + fs = glob.glob(join_path("build","*debug",lib_name + "_debug.*")) + for f in fs: + install(f, prefix.lib) diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py index 529adf778832c3350e08fccbb11abd080d5b302e..db8bee88d0d18ac07ebbd1f898b09bc8b522962f 100644 --- a/var/spack/repos/builtin/packages/tcl/package.py +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -10,8 +10,13 @@ class Tcl(Package): extensible.""" homepage = "http://www.tcl.tk" - version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f', - url="http://prdownloads.sourceforge.net/tcl/tcl8.6.3-src.tar.gz") + def url_for_version(self, version): + return 'http://prdownloads.sourceforge.net/tcl/tcl%s-src.tar.gz' % version + + version('8.6.5', '0e6426a4ca9401825fbc6ecf3d89a326') + version('8.6.4', 'd7cbb91f1ded1919370a30edd1534304') + version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f') + version('8.5.19', '0e6426a4ca9401825fbc6ecf3d89a326') depends_on('zlib') diff --git a/var/spack/repos/builtin/packages/tk/package.py b/var/spack/repos/builtin/packages/tk/package.py index 96736f6f95d68ad0d4295d4871e5d36ad17a1799..839d217f344affeaead5d1896daa5b0baa767185 100644 --- a/var/spack/repos/builtin/packages/tk/package.py +++ b/var/spack/repos/builtin/packages/tk/package.py @@ -8,9 +8,11 @@ class Tk(Package): applications that run unchanged across Windows, Mac OS X, Linux and more.""" homepage = "http://www.tcl.tk" - url = "http://prdownloads.sourceforge.net/tcl/tk8.6.3-src.tar.gz" - version('src', 
'85ca4dbf4dcc19777fd456f6ee5d0221') + def url_for_version(self, version): + return "http://prdownloads.sourceforge.net/tcl/tk%s-src.tar.gz" % version + + version('8.6.3', '85ca4dbf4dcc19777fd456f6ee5d0221') depends_on("tcl") diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index edc40476e30063c83b56e30758f62dc7b43ccfeb..0f72055fa72fbef752684c60b3c7e6f6ee729199 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -1,15 +1,22 @@ from spack import * +import os, sys, glob - +# Trilinos is complicated to build, as an inspiration a couple of links to other repositories which build it: +# https://github.com/hpcugent/easybuild-easyblocks/blob/master/easybuild/easyblocks/t/trilinos.py#L111 +# https://github.com/koecher/candi/blob/master/deal.II-toolchain/packages/trilinos.package +# https://gitlab.com/configurations/cluster-config/blob/master/trilinos.sh +# https://github.com/Homebrew/homebrew-science/blob/master/trilinos.rb +# and some relevant documentation/examples: +# https://github.com/trilinos/Trilinos/issues/175 class Trilinos(Package): - """ - The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented + """The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented software framework for the solution of large-scale, complex multi-physics engineering and scientific problems. A unique design feature of Trilinos is its focus on packages. """ homepage = "https://trilinos.org/" url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" + version('12.6.1', 'adcf2d3aab74cdda98f88fee19cd1442604199b0515ee3da4d80cbe8f37d00e4') version('12.4.2', '7c830f7f0f68b8ad324690603baf404e') version('12.2.1', '6161926ea247863c690e927687f83be9') version('12.0.1', 'bd99741d047471e127b8296b2ec08017') @@ -17,8 +24,16 @@ class Trilinos(Package): version('11.14.2', 'a43590cf896c677890d75bfe75bc6254') version('11.14.1', '40febc57f76668be8b6a77b7607bb67f') - variant('shared', default=True, description='Enables the build of shared libraries') - variant('debug', default=False, description='Builds a debug version of the libraries') + variant('metis', default=True, description='Compile with METIS and ParMETIS') + variant('mumps', default=True, description='Compile with support for MUMPS solvers') + variant('superlu-dist', default=True, description='Compile with SuperluDist solvers') + variant('hypre', default=True, description='Compile with Hypre preconditioner') + variant('hdf5', default=True, description='Compile with HDF5') + variant('suite-sparse', default=True, description='Compile with SuiteSparse solvers') + # not everyone has py-numpy activated, keep it disabled by default to avoid configure errors + variant('python', default=False, description='Build python wrappers') + variant('shared', default=True, description='Enables the build of shared libraries') + variant('debug', default=False, description='Builds a debug version of the libraries') # Everything should be compiled with -fpic depends_on('blas') @@ -27,28 +42,205 @@ class Trilinos(Package): depends_on('matio') depends_on('glm') depends_on('swig') + depends_on('metis@5:',when='+metis') + depends_on('suite-sparse',when='+suite-sparse') # MPI related dependencies depends_on('mpi') depends_on('netcdf+mpi') + depends_on('parmetis',when='+metis') + # Trilinos' Tribits config system is limited which makes 
it + # very tricky to link Amesos with static MUMPS, see + # https://trilinos.org/docs/dev/packages/amesos2/doc/html/classAmesos2_1_1MUMPS.html + # One could work it out by getting linking flags from mpif90 --showme:link (or alike) + # and adding results to -DTrilinos_EXTRA_LINK_FLAGS + # together with Blas and Lapack and ScaLAPACK and Blacs and -lgfortran and + # it may work at the end. But let's avoid all this by simply using shared libs + depends_on('mumps@5.0:+mpi+shared',when='+mumps') + depends_on('scalapack',when='+mumps') + depends_on('superlu-dist',when='+superlu-dist') + depends_on('hypre~internal-superlu',when='+hypre') + depends_on('hdf5+mpi',when='+hdf5') + + depends_on('python',when='+python') - depends_on('python') # Needs py-numpy activated + patch('umfpack_from_suitesparse.patch') + + # check that the combination of variants makes sense + def variants_check(self): + if '+superlu-dist' in self.spec and self.spec.satisfies('@:11.4.3'): + # For Trilinos v11 we need to force SuperLUDist=OFF, + # since only the deprecated SuperLUDist v3.3 together with an Amesos patch + # is working. + raise RuntimeError('The superlu-dist variant can only be used with Trilinos @12.0.1:') def install(self, spec, prefix): + self.variants_check() + + cxx_flags = [] options = [] options.extend(std_cmake_args) + mpi_bin = spec['mpi'].prefix.bin options.extend(['-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', + '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON', + '-DTrilinos_VERBOSE_CONFIGURE:BOOL=OFF', '-DTrilinos_ENABLE_TESTS:BOOL=OFF', '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF', - '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'), + '-DCMAKE_BUILD_TYPE:STRING=%s' % ('DEBUG' if '+debug' in spec else 'RELEASE'), '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'), - '-DTPL_ENABLE_MPI:STRING=ON', - '-DBLAS_LIBRARY_DIRS:PATH=%s' % spec['blas'].prefix, - '-DLAPACK_LIBRARY_DIRS:PATH=%s' % spec['lapack'].prefix + '-DTPL_ENABLE_MPI:BOOL=ON', + '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix, + '-DTPL_ENABLE_BLAS=ON', + '-DBLAS_LIBRARY_NAMES=blas', # FIXME: don't hardcode names + '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib, + '-DTPL_ENABLE_LAPACK=ON', + '-DLAPACK_LIBRARY_NAMES=lapack', + '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix, + '-DTPL_ENABLE_Boost:BOOL=ON', + '-DBoost_INCLUDE_DIRS:PATH=%s' % spec['boost'].prefix.include, + '-DBoost_LIBRARY_DIRS:PATH=%s' % spec['boost'].prefix.lib, + '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', + '-DTrilinos_ENABLE_CXX11:BOOL=ON', + '-DTPL_ENABLE_Netcdf:BOOL=ON', + '-DTPL_ENABLE_HYPRE:BOOL=%s' % ('ON' if '+hypre' in spec else 'OFF'), + '-DTPL_ENABLE_HDF5:BOOL=%s' % ('ON' if '+hdf5' in spec else 'OFF'), ]) + # Fortran lib + libgfortran = os.path.dirname (os.popen('%s --print-file-name libgfortran.a' % join_path(mpi_bin,'mpif90') ).read()) + options.extend([ + '-DTrilinos_EXTRA_LINK_FLAGS:STRING=-L%s/ -lgfortran' % libgfortran, + '-DTrilinos_ENABLE_Fortran=ON' + ]) + + # for build-debug only: + #options.extend([ + # '-DCMAKE_VERBOSE_MAKEFILE:BOOL=TRUE' + #]) + + # suite-sparse related + if '+suite-sparse' in spec: + options.extend([ + '-DTPL_ENABLE_Cholmod:BOOL=OFF', # FIXME: Trilinos seems to be looking for static libs only, patch CMake TPL file? 
+ #'-DTPL_ENABLE_Cholmod:BOOL=ON', + #'-DCholmod_LIBRARY_DIRS:PATH=%s' % spec['suite-sparse'].prefix.lib, + #'-DCholmod_INCLUDE_DIRS:PATH=%s' % spec['suite-sparse'].prefix.include, + '-DTPL_ENABLE_UMFPACK:BOOL=ON', + '-DUMFPACK_LIBRARY_DIRS:PATH=%s' % spec['suite-sparse'].prefix.lib, + '-DUMFPACK_INCLUDE_DIRS:PATH=%s' % spec['suite-sparse'].prefix.include, + '-DUMFPACK_LIBRARY_NAMES=umfpack;amd;colamd;cholmod;suitesparseconfig' + ]) + else: + options.extend([ + '-DTPL_ENABLE_Cholmod:BOOL=OFF', + '-DTPL_ENABLE_UMFPACK:BOOL=OFF', + ]) + + # metis / parmetis + if '+metis' in spec: + options.extend([ + '-DTPL_ENABLE_METIS:BOOL=ON', + '-DMETIS_LIBRARY_DIRS=%s' % spec['metis'].prefix.lib, + '-DMETIS_LIBRARY_NAMES=metis', + '-DTPL_METIS_INCLUDE_DIRS=%s' % spec['metis'].prefix.include, + '-DTPL_ENABLE_ParMETIS:BOOL=ON', + '-DParMETIS_LIBRARY_DIRS=%s;%s' % (spec['parmetis'].prefix.lib,spec['metis'].prefix.lib), + '-DParMETIS_LIBRARY_NAMES=parmetis;metis', + '-DTPL_ParMETIS_INCLUDE_DIRS=%s' % spec['parmetis'].prefix.include + ]) + else: + options.extend([ + '-DTPL_ENABLE_METIS:BOOL=OFF', + '-DTPL_ENABLE_ParMETIS:BOOL=OFF', + ]) + + # mumps / scalapack + if '+mumps' in spec: + options.extend([ + '-DTPL_ENABLE_MUMPS:BOOL=ON', + '-DMUMPS_LIBRARY_DIRS=%s' % spec['mumps'].prefix.lib, + '-DMUMPS_LIBRARY_NAMES=dmumps;mumps_common;pord', # order is important! + '-DTPL_ENABLE_SCALAPACK:BOOL=ON', + '-DSCALAPACK_LIBRARY_NAMES=scalapack' # FIXME: for MKL it's mkl_scalapack_lp64;mkl_blacs_mpich_lp64 + ]) + # see https://github.com/trilinos/Trilinos/blob/master/packages/amesos/README-MUMPS + cxx_flags.extend([ + '-DMUMPS_5_0' + ]) + else: + options.extend([ + '-DTPL_ENABLE_MUMPS:BOOL=OFF', + '-DTPL_ENABLE_SCALAPACK:BOOL=OFF', + ]) + + # superlu-dist: + if '+superlu-dist' in spec: + # Amesos, conflicting types of double and complex SLU_D + # see https://trilinos.org/pipermail/trilinos-users/2015-March/004731.html + # and https://trilinos.org/pipermail/trilinos-users/2015-March/004802.html + options.extend([ + '-DTeuchos_ENABLE_COMPLEX:BOOL=OFF', + '-DKokkosTSQR_ENABLE_Complex:BOOL=OFF' + ]) + options.extend([ + '-DTPL_ENABLE_SuperLUDist:BOOL=ON', + '-DSuperLUDist_LIBRARY_DIRS=%s' % spec['superlu-dist'].prefix.lib, + '-DSuperLUDist_INCLUDE_DIRS=%s' % spec['superlu-dist'].prefix.include + ]) + if spec.satisfies('^superlu-dist@4.0:'): + options.extend([ + '-DHAVE_SUPERLUDIST_LUSTRUCTINIT_2ARG:BOOL=ON' + ]) + else: + options.extend([ + '-DTPL_ENABLE_SuperLUDist:BOOL=OFF', + ]) + + + # python + if '+python' in spec: + options.extend([ + '-DTrilinos_ENABLE_PyTrilinos:BOOL=ON' + ]) + else: + options.extend([ + '-DTrilinos_ENABLE_PyTrilinos:BOOL=OFF' + ]) + + # collect CXX flags: + options.extend([ + '-DCMAKE_CXX_FLAGS:STRING=%s' % (' '.join(cxx_flags)), + ]) + + # disable due to compiler / config errors: + options.extend([ + '-DTrilinos_ENABLE_SEACAS=OFF', + '-DTrilinos_ENABLE_Pike=OFF', + '-DTrilinos_ENABLE_STK=OFF' + ]) + if sys.platform == 'darwin': + options.extend([ + '-DTrilinos_ENABLE_FEI=OFF' + ]) + + with working_dir('spack-build', create=True): cmake('..', *options) make() make('install') + + # When trilinos is built with Python, libpytrilinos is included through + # cmake configure files. Namely, Trilinos_LIBRARIES in TrilinosConfig.cmake + # contains pytrilinos. This leads to a run-time error: + # Symbol not found: _PyBool_Type + # and prevents Trilinos to be used in any C++ code, which links executable + # against the libraries listed in Trilinos_LIBRARIES. 
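The workaround described in the comment above continues below. Stepping back, most of the optional-TPL branches in this ``install()`` reduce to the same recipe: flip ``-DTPL_ENABLE_<name>`` according to a variant and, when enabled, point CMake at the dependency's prefix. A hypothetical helper that captures that shape (a sketch only, not how the package is actually written):

.. code-block:: python

        def tpl_args(name, variant, dep=None):
            """Hypothetical helper: CMake arguments for one optional Trilinos TPL."""
            enabled = ('+%s' % variant) in spec
            args = ['-DTPL_ENABLE_%s:BOOL=%s' % (name, 'ON' if enabled else 'OFF')]
            if enabled and dep is not None:
                args.append('-D%s_LIBRARY_DIRS=%s' % (name, spec[dep].prefix.lib))
                args.append('-DTPL_%s_INCLUDE_DIRS=%s' % (name, spec[dep].prefix.include))
            return args

        # e.g.: options.extend(tpl_args('METIS', 'metis', dep='metis'))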
+ # See https://github.com/Homebrew/homebrew-science/issues/2148#issuecomment-103614509 + # A workaround it to remove PyTrilinos from the COMPONENTS_LIST : + if '+python' in self.spec: + filter_file(r'(SET\(COMPONENTS_LIST.*)(PyTrilinos;)(.*)', (r'\1\3'), '%s/cmake/Trilinos/TrilinosConfig.cmake' % prefix.lib) + + # The shared libraries are not installed correctly on Darwin; correct this + if (sys.platform == 'darwin') and ('+shared' in spec): + fix_darwin_install_name(prefix.lib) diff --git a/var/spack/repos/builtin/packages/trilinos/umfpack_from_suitesparse.patch b/var/spack/repos/builtin/packages/trilinos/umfpack_from_suitesparse.patch new file mode 100644 index 0000000000000000000000000000000000000000..9defc555276fbe278ef1064390cab483e7ea62dd --- /dev/null +++ b/var/spack/repos/builtin/packages/trilinos/umfpack_from_suitesparse.patch @@ -0,0 +1,12 @@ +diff --git a/cmake/TPLs/FindTPLUMFPACK.cmake b/cmake/TPLs/FindTPLUMFPACK.cmake +index 963eb71..998cd02 100644 +--- a/cmake/TPLs/FindTPLUMFPACK.cmake ++++ b/cmake/TPLs/FindTPLUMFPACK.cmake +@@ -55,6 +55,6 @@ + + + TRIBITS_TPL_FIND_INCLUDE_DIRS_AND_LIBRARIES( UMFPACK +- REQUIRED_HEADERS umfpack.h amd.h UFconfig.h ++ REQUIRED_HEADERS umfpack.h amd.h SuiteSparse_config.h + REQUIRED_LIBS_NAMES umfpack amd + ) diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index 4a27a8fedba1c06705f5a1e603f4b29713b5b5cb..24382af406cf975796b3df398a622d489e8f4d98 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -7,11 +7,23 @@ class Vtk(Package): homepage = "http://www.vtk.org" url = "http://www.vtk.org/files/release/6.1/VTK-6.1.0.tar.gz" + version("7.0.0", "5fe35312db5fb2341139b8e4955c367d", url="http://www.vtk.org/files/release/7.0/VTK-7.0.0.tar.gz") + + version("6.3.0", '0231ca4840408e9dd60af48b314c5b6d', url="http://www.vtk.org/files/release/6.3/VTK-6.3.0.tar.gz") + version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d') depends_on("qt") + # VTK7 defaults to OpenGL2 rendering backend + variant('opengl2', default=True, description='Build with OpenGL instead of OpenGL2 as rendering backend') + def install(self, spec, prefix): + def feature_to_bool(feature, on='ON', off='OFF'): + if feature in spec: + return on + return off + with working_dir('spack-build', create=True): cmake_args = [ "..", @@ -35,6 +47,12 @@ def install(self, spec, prefix): if spec['qt'].satisfies('@5'): cmake_args.append("-DVTK_QT_VERSION:STRING=5") + if spec.satisfies("@6.1.0"): + cmake_args.append("-DCMAKE_C_FLAGS=-DGLX_GLXEXT_LEGACY") + cmake_args.append("-DCMAKE_CXX_FLAGS=-DGLX_GLXEXT_LEGACY") + + cmake_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) + cmake(*cmake_args) make() make("install") diff --git a/var/spack/repos/builtin/packages/zoltan/package.py b/var/spack/repos/builtin/packages/zoltan/package.py new file mode 100644 index 0000000000000000000000000000000000000000..e20ae81adbe2f1b74888fddffca1d1ae0411dfdf --- /dev/null +++ b/var/spack/repos/builtin/packages/zoltan/package.py @@ -0,0 +1,54 @@ +from spack import * + +class Zoltan(Package): + """The Zoltan library is a toolkit of parallel combinatorial algorithms for + parallel, unstructured, and/or adaptive scientific applications. Zoltan's + largest component is a suite of dynamic load-balancing and paritioning + algorithms that increase applications' parallel performance by reducing + idle time. 
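The Zoltan package description continues below. Earlier in this hunk, the PyTrilinos workaround relies on ``filter_file``, which rewrites files in place given a regular expression, a replacement, and one or more paths; it is the usual tool for post-install fixups of generated configuration files. A self-contained example with an illustrative component name, pattern and path:

.. code-block:: python

        # Drop a problematic entry from an installed CMake config file.
        # The component name, pattern and path here are illustrative only.
        filter_file(r'(SET\(COMPONENTS_LIST.*)(ExampleComponent;)(.*)',
                    r'\1\3',
                    join_path(prefix.lib, 'cmake', 'Example', 'ExampleConfig.cmake'))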
+    Zoltan also has graph coloring and graph ordering algorithms,
+    which are useful in task schedulers and parallel preconditioners."""
+
+    homepage = "http://www.cs.sandia.gov/zoltan"
+    base_url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions"
+
+    version('3.83', '1ff1bc93f91e12f2c533ddb01f2c095f')
+    version('3.3', '5eb8f00bda634b25ceefa0122bd18d65')
+
+    variant('fortran', default=True, description='Enable Fortran support')
+    variant('mpi', default=False, description='Enable MPI support')
+
+    depends_on('mpi', when='+mpi')
+
+    def install(self, spec, prefix):
+        config_args = [
+            '--enable-f90interface' if '+fortran' in spec else '--disable-f90interface',
+            '--enable-mpi' if '+mpi' in spec else '--disable-mpi',
+        ]
+
+        if '+mpi' in spec:
+            config_args.append('--with-mpi=%s' % spec['mpi'].prefix)
+            config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin)
+            config_args.append('CC=%s/mpicc' % spec['mpi'].prefix.bin)
+            config_args.append('CXX=%s/mpicxx' % spec['mpi'].prefix.bin)
+
+        # NOTE: Early versions of Zoltan come packaged with a few embedded
+        # library packages (e.g. ParMETIS, Scotch), which messes with Spack's
+        # ability to descend directly into the package's source directory.
+        if spec.satisfies('@:3.3'):
+            cd('Zoltan_v%s' % self.version)
+
+        mkdirp('build')
+        cd('build')
+
+        config_zoltan = Executable('../configure')
+        config_zoltan('--prefix=%s' % pwd(), *config_args)
+
+        make()
+        make('install')
+
+        mkdirp(prefix)
+        move('include', prefix)
+        move('lib', prefix)
+
+    def url_for_version(self, version):
+        return '%s/zoltan_distrib_v%s.tar.gz' % (Zoltan.base_url, version)
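Several recipes in this changeset (Qt, Silo, Tcl, Tk, Zoltan) replace hard-coded per-version URLs with a single ``url_for_version`` method, which Spack calls to compute the download URL for whichever version is being installed. A minimal, self-contained sketch of the idiom with a placeholder project and checksums:

.. code-block:: python

    from spack import *

    class Example(Package):
        """Hypothetical package illustrating url_for_version()."""
        homepage = "https://example.org"                       # placeholder
        base_url = "https://example.org/downloads"             # placeholder

        version('2.1', '00000000000000000000000000000000')     # placeholder md5
        version('1.0', '11111111111111111111111111111111')     # placeholder md5

        def url_for_version(self, version):
            # Newer releases moved into versioned subdirectories.
            if version >= Version('2'):
                return '%s/v%s/example-%s.tar.gz' % (Example.base_url,
                                                     version.up_to(1), version)
            return '%s/old/example-%s.tar.gz' % (Example.base_url, version)

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')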