From 336191c25145823be104011fda23651e6e6aa723 Mon Sep 17 00:00:00 2001
From: Greg Becker <becker33@llnl.gov>
Date: Tue, 31 Mar 2020 16:09:08 -0700
Subject: [PATCH] packages.yaml: allow virtuals to specify buildable: false
 (#14934)

Currently, to force Spack to use an external MPI, you have to set `buildable: False`
for every MPI provider Spack knows about in your packages.yaml file. This is both
tedious and fragile: new MPI providers added to Spack can break your workflow the
next time you do a `git pull`.

This PR allows you to mark an entire virtual dependency as non-buildable and to
specify external installations of particular implementations for Spack to use instead.
For example:

```yaml
packages:
  all:
    providers:
      mpi: [mpich]
  mpi:
    buildable: false
    paths:
      mpich@3.2 %gcc@7.3.0: /usr/packages/mpich-3.2-gcc-7.3.0
```
This configuration forces all Spack builds to use the specified external `mpich` installation.
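
Externals can also be located through environment modules instead of
filesystem paths (this is exercised by the new `test_external_module` test);
a minimal sketch, assuming an `mpich-3.2` module exists on the system (the
module name here is illustrative):

```yaml
packages:
  all:
    providers:
      mpi: [mpich]
  mpi:
    buildable: false
    modules:
      mpich@3.2 %gcc@7.3.0: mpich-3.2
```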
---
 lib/spack/docs/build_settings.rst             | 33 +++++++
 lib/spack/spack/package.py                    |  8 ++
 lib/spack/spack/package_prefs.py              | 78 ++++++----------
 .../spack/test/concretize_preferences.py      | 51 ++++++-----
 lib/spack/spack/test/conftest.py              |  7 ++
 lib/spack/spack/test/mirror.py                | 90 +++++++++----------
 lib/spack/spack/test/module_parsing.py        | 30 ++-----
 lib/spack/spack/test/spec_dag.py              |  2 -
 lib/spack/spack/util/module_cmd.py            |  9 +-
 9 files changed, 155 insertions(+), 153 deletions(-)

diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst
index b141f2b717..cfd850af28 100644
--- a/lib/spack/docs/build_settings.rst
+++ b/lib/spack/docs/build_settings.rst
@@ -124,6 +124,39 @@ The ``buildable`` does not need to be paired with external packages.
 It could also be used alone to forbid packages that may be
 buggy or otherwise undesirable.
 
+Virtual packages in Spack can also be marked as not buildable, with
+external implementations provided instead. In the example above,
+OpenMPI is configured as not buildable, but Spack may still prefer to
+build another MPI implementation rather than use the external OpenMPI.
+Every MPI provider could be marked not buildable individually, but it
+is more convenient to mark the ``mpi`` virtual package itself:
+
+.. code-block:: yaml
+
+   packages:
+     mpi:
+       buildable: False
+     openmpi:
+       paths:
+         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
+         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
+         openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
+
+External installations can also be listed under the virtual package
+they provide, rather than under each implementation's own name:
+
+.. code-block:: yaml
+
+   packages:
+     mpi:
+       buildable: False
+       paths:
+         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
+         openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
+         openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
+         mpich@3.3 %clang@9.0.0 arch=linux-debian7-x86_64: /opt/mpich-3.3-intel
+
+Spack can then use any of the listed external installations of MPI to
+satisfy a dependency, choosing among them according to the compiler
+and architecture of the spec being concretized.
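+
+When more than one of the listed external installations can satisfy a
+dependency, the concretization preferences described below can be used
+to select a preferred provider, e.g. to prefer ``mpich``:
+
+.. code-block:: yaml
+
+   packages:
+     all:
+       providers:
+         mpi: [mpich]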
 
 .. _concretization-preferences:
 
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 9156d7d0dd..b8ded0364b 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -1035,6 +1035,14 @@ def provides(self, vpkg_name):
             for s, constraints in self.provided.items() if s.name == vpkg_name
         )
 
+    @property
+    def virtuals_provided(self):
+        """Virtual packages provided by this package under the
+        constraints satisfied by its current spec."""
+        return [vspec for vspec, constraints in self.provided.items()
+                if any(self.spec.satisfies(c) for c in constraints)]
+
     @property
     def installed(self):
         """Installation status of a package.
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py
index 2801a6d123..0158c7063a 100644
--- a/lib/spack/spack/package_prefs.py
+++ b/lib/spack/spack/package_prefs.py
@@ -6,7 +6,6 @@
 import stat
 
 from six import string_types
-from six import iteritems
 
 import spack.repo
 import spack.error
@@ -23,27 +22,6 @@ def _spec_type(component):
     return _lesser_spec_types.get(component, spack.spec.Spec)
 
 
-def get_packages_config():
-    """Wrapper around get_packages_config() to validate semantics."""
-    config = spack.config.get('packages')
-
-    # Get a list of virtuals from packages.yaml.  Note that because we
-    # check spack.repo, this collects virtuals that are actually provided
-    # by sometihng, not just packages/names that don't exist.
-    # So, this won't include, e.g., 'all'.
-    virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config
-                if spack.repo.path.is_virtual(pkg_name)]
-
-    # die if there are virtuals in `packages.py`
-    if virtuals:
-        errors = ["%s: %s" % (line_info, name) for name, line_info in virtuals]
-        raise VirtualInPackagesYAMLError(
-            "packages.yaml entries cannot be virtual packages:",
-            '\n'.join(errors))
-
-    return config
-
-
 class PackagePrefs(object):
     """Defines the sort order for a set of specs.
 
@@ -116,7 +94,7 @@ def order_for_package(cls, pkgname, component, vpkg=None, all=True):
             pkglist.append('all')
 
         for pkg in pkglist:
-            pkg_entry = get_packages_config().get(pkg)
+            pkg_entry = spack.config.get('packages').get(pkg)
             if not pkg_entry:
                 continue
 
@@ -160,7 +138,8 @@ def has_preferred_targets(cls, pkg_name):
     def preferred_variants(cls, pkg_name):
         """Return a VariantMap of preferred variants/values for a spec."""
         for pkg in (pkg_name, 'all'):
-            variants = get_packages_config().get(pkg, {}).get('variants', '')
+            variants = spack.config.get('packages').get(pkg, {}).get(
+                'variants', '')
             if variants:
                 break
 
@@ -181,33 +160,29 @@ def spec_externals(spec):
     # break circular import.
     from spack.util.module_cmd import get_path_from_module # NOQA: ignore=F401
 
-    allpkgs = get_packages_config()
-    name = spec.name
+    allpkgs = spack.config.get('packages')
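+    # Consider both the package's own name and the names of any virtual
+    # packages it provides when looking up external paths and modules.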
+    names = set([spec.name])
+    names |= set(vspec.name for vspec in spec.package.virtuals_provided)
 
     external_specs = []
-    pkg_paths = allpkgs.get(name, {}).get('paths', None)
-    pkg_modules = allpkgs.get(name, {}).get('modules', None)
-    if (not pkg_paths) and (not pkg_modules):
-        return []
-
-    for external_spec, path in iteritems(pkg_paths):
-        if not path:
-            # skip entries without paths (avoid creating extra Specs)
+    for name in names:
+        pkg_config = allpkgs.get(name, {})
+        pkg_paths = pkg_config.get('paths', {})
+        pkg_modules = pkg_config.get('modules', {})
+        if (not pkg_paths) and (not pkg_modules):
             continue
 
-        external_spec = spack.spec.Spec(external_spec,
-                                        external_path=canonicalize_path(path))
-        if external_spec.satisfies(spec):
-            external_specs.append(external_spec)
-
-    for external_spec, module in iteritems(pkg_modules):
-        if not module:
-            continue
+        for external_spec, path in pkg_paths.items():
+            external_spec = spack.spec.Spec(
+                external_spec, external_path=canonicalize_path(path))
+            if external_spec.satisfies(spec):
+                external_specs.append(external_spec)
 
-        external_spec = spack.spec.Spec(
-            external_spec, external_module=module)
-        if external_spec.satisfies(spec):
-            external_specs.append(external_spec)
+        for external_spec, module in pkg_modules.items():
+            external_spec = spack.spec.Spec(
+                external_spec, external_module=module)
+            if external_spec.satisfies(spec):
+                external_specs.append(external_spec)
 
     # defensively copy returned specs
     return [s.copy() for s in external_specs]
@@ -215,12 +190,11 @@ def spec_externals(spec):
 
 def is_spec_buildable(spec):
     """Return true if the spec pkgspec is configured as buildable"""
-    allpkgs = get_packages_config()
-    if spec.name not in allpkgs:
-        return True
-    if 'buildable' not in allpkgs[spec.name]:
-        return True
-    return allpkgs[spec.name]['buildable']
+    allpkgs = spack.config.get('packages')
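+    # Collect every name marked 'buildable: false'; a spec is not buildable
+    # if its own name or any virtual it provides appears in that list.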
+    do_not_build = [name for name, entry in allpkgs.items()
+                    if not entry.get('buildable', True)]
+    return not (spec.name in do_not_build or
+                any(spec.package.provides(name) for name in do_not_build))
 
 
 def get_package_dir_permissions(spec):
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index 81b5360869..922d5a11d8 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -185,36 +185,39 @@ def test_develop(self):
         spec.concretize()
         assert spec.version == Version('0.2.15.develop')
 
-    def test_no_virtuals_in_packages_yaml(self):
-        """Verify that virtuals are not allowed in packages.yaml."""
+    def test_external_mpi(self):
+        # make sure this doesn't give us an external first.
+        spec = Spec('mpi')
+        spec.concretize()
+        assert not spec['mpi'].external
 
-        # set up a packages.yaml file with a vdep as a key.  We use
-        # syaml.load_config here to make sure source lines in the config are
-        # attached to parsed strings, as the error message uses them.
+        # load config
         conf = syaml.load_config("""\
-mpi:
+all:
+    providers:
+        mpi: [mpich]
+mpich:
+    buildable: false
     paths:
-      mpi-with-lapack@2.1: /path/to/lapack
+        mpich@3.0.4: /dummy/path
 """)
         spack.config.set('packages', conf, scope='concretize')
 
-        # now when we get the packages.yaml config, there should be an error
-        with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
-            spack.package_prefs.get_packages_config()
-
-    def test_all_is_not_a_virtual(self):
-        """Verify that `all` is allowed in packages.yaml."""
-        conf = syaml.load_config("""\
-all:
-        variants: [+mpi]
-""")
-        spack.config.set('packages', conf, scope='concretize')
+        # ensure that once config is in place, external is used
+        spec = Spec('mpi')
+        spec.concretize()
+        assert spec['mpich'].external_path == '/dummy/path'
 
-        # should be no error for 'all':
-        spack.package_prefs.get_packages_config()
+    def test_external_module(self, monkeypatch):
+        """Test that packages can find externals specified by module
 
-    def test_external_mpi(self):
+        The specific code for parsing the module is tested elsewhere.
+        This just tests that the preference is accounted for"""
         # make sure this doesn't give us an external first.
+        def mock_module(cmd, module):
+            return 'prepend-path PATH /dummy/path'
+        monkeypatch.setattr(spack.util.module_cmd, 'module', mock_module)
+
         spec = Spec('mpi')
         spec.concretize()
         assert not spec['mpi'].external
@@ -224,10 +227,10 @@ def test_external_mpi(self):
 all:
     providers:
         mpi: [mpich]
-mpich:
+mpi:
     buildable: false
-    paths:
-        mpich@3.0.4: /dummy/path
+    modules:
+        mpich@3.0.4: dummy
 """)
         spack.config.set('packages', conf, scope='concretize')
 
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index a186463907..8912c0219b 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -1040,6 +1040,13 @@ def __init__(self, name, dependencies, dependency_types, conditions=None,
         self.conflicts = {}
         self.patches = {}
 
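+    # Minimal stand-ins for Package.provides and Package.virtuals_provided,
+    # used by package_prefs when handling virtual packages in tests.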
+    def provides(self, vname):
+        return vname in self.provided
+
+    @property
+    def virtuals_provided(self):
+        return [v.name for v, c in self.provided]
+
 
 class MockPackageMultiRepo(object):
     def __init__(self, packages):
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index 570c329b71..05cf46dc20 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -52,52 +52,52 @@ def check_mirror():
         mirror_root = os.path.join(stage.path, 'test-mirror')
         # register mirror with spack config
         mirrors = {'spack-mirror-test': 'file://' + mirror_root}
-        spack.config.set('mirrors', mirrors)
-        with spack.config.override('config:checksum', False):
-            specs = [Spec(x).concretized() for x in repos]
-            spack.mirror.create(mirror_root, specs)
-
-        # Stage directory exists
-        assert os.path.isdir(mirror_root)
-
-        for spec in specs:
-            fetcher = spec.package.fetcher[0]
-            per_package_ref = os.path.join(
-                spec.name, '-'.join([spec.name, str(spec.version)]))
-            mirror_paths = spack.mirror.mirror_archive_paths(
-                fetcher,
-                per_package_ref)
-            expected_path = os.path.join(
-                mirror_root, mirror_paths.storage_path)
-            assert os.path.exists(expected_path)
-
-        # Now try to fetch each package.
-        for name, mock_repo in repos.items():
-            spec = Spec(name).concretized()
-            pkg = spec.package
-
+        with spack.config.override('mirrors', mirrors):
             with spack.config.override('config:checksum', False):
-                with pkg.stage:
-                    pkg.do_stage(mirror_only=True)
-
-                    # Compare the original repo with the expanded archive
-                    original_path = mock_repo.path
-                    if 'svn' in name:
-                        # have to check out the svn repo to compare.
-                        original_path = os.path.join(
-                            mock_repo.path, 'checked_out')
-
-                        svn = which('svn', required=True)
-                        svn('checkout', mock_repo.url, original_path)
-
-                    dcmp = filecmp.dircmp(
-                        original_path, pkg.stage.source_path)
-
-                    # make sure there are no new files in the expanded
-                    # tarball
-                    assert not dcmp.right_only
-                    # and that all original files are present.
-                    assert all(l in exclude for l in dcmp.left_only)
+                specs = [Spec(x).concretized() for x in repos]
+                spack.mirror.create(mirror_root, specs)
+
+            # Stage directory exists
+            assert os.path.isdir(mirror_root)
+
+            for spec in specs:
+                fetcher = spec.package.fetcher[0]
+                per_package_ref = os.path.join(
+                    spec.name, '-'.join([spec.name, str(spec.version)]))
+                mirror_paths = spack.mirror.mirror_archive_paths(
+                    fetcher,
+                    per_package_ref)
+                expected_path = os.path.join(
+                    mirror_root, mirror_paths.storage_path)
+                assert os.path.exists(expected_path)
+
+            # Now try to fetch each package.
+            for name, mock_repo in repos.items():
+                spec = Spec(name).concretized()
+                pkg = spec.package
+
+                with spack.config.override('config:checksum', False):
+                    with pkg.stage:
+                        pkg.do_stage(mirror_only=True)
+
+                        # Compare the original repo with the expanded archive
+                        original_path = mock_repo.path
+                        if 'svn' in name:
+                            # have to check out the svn repo to compare.
+                            original_path = os.path.join(
+                                mock_repo.path, 'checked_out')
+
+                            svn = which('svn', required=True)
+                            svn('checkout', mock_repo.url, original_path)
+
+                        dcmp = filecmp.dircmp(
+                            original_path, pkg.stage.source_path)
+
+                        # make sure there are no new files in the expanded
+                        # tarball
+                        assert not dcmp.right_only
+                        # and that all original files are present.
+                        assert all(l in exclude for l in dcmp.left_only)
 
 
 def test_url_mirror(mock_archive):
diff --git a/lib/spack/spack/test/module_parsing.py b/lib/spack/spack/test/module_parsing.py
index bbe18b1ad0..0bf485913f 100644
--- a/lib/spack/spack/test/module_parsing.py
+++ b/lib/spack/spack/test/module_parsing.py
@@ -20,28 +20,11 @@
                      'setenv LDFLAGS -L/path/to/lib',
                      'prepend-path PATH /path/to/bin']
 
+_test_template = "'. %s 2>&1' % args[1]"
 
-@pytest.fixture
-def module_function_test_mode():
-    old_mode = spack.util.module_cmd._test_mode
-    spack.util.module_cmd._test_mode = True
 
-    yield
-
-    spack.util.module_cmd._test_mode = old_mode
-
-
-@pytest.fixture
-def save_module_func():
-    old_func = spack.util.module_cmd.module
-
-    yield
-
-    spack.util.module_cmd.module = old_func
-
-
-def test_module_function_change_env(tmpdir, working_env,
-                                    module_function_test_mode):
+def test_module_function_change_env(tmpdir, working_env, monkeypatch):
+    monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template)
     src_file = str(tmpdir.join('src_me'))
     with open(src_file, 'w') as f:
         f.write('export TEST_MODULE_ENV_VAR=TEST_SUCCESS\n')
@@ -53,7 +36,8 @@ def test_module_function_change_env(tmpdir, working_env,
     assert os.environ['NOT_AFFECTED'] == "NOT_AFFECTED"
 
 
-def test_module_function_no_change(tmpdir, module_function_test_mode):
+def test_module_function_no_change(tmpdir, monkeypatch):
+    monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template)
     src_file = str(tmpdir.join('src_me'))
     with open(src_file, 'w') as f:
         f.write('echo TEST_MODULE_FUNCTION_PRINT')
@@ -65,11 +49,11 @@ def test_module_function_no_change(tmpdir, module_function_test_mode):
     assert os.environ == old_env
 
 
-def test_get_path_from_module_faked(save_module_func):
+def test_get_path_from_module_faked(monkeypatch):
     for line in test_module_lines:
         def fake_module(*args):
             return line
-        spack.util.module_cmd.module = fake_module
+        monkeypatch.setattr(spack.util.module_cmd, 'module', fake_module)
 
         path = get_path_from_module('mod')
         assert path == '/path/to'
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 25917f9424..e031f02c25 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -387,7 +387,6 @@ def test_unsatisfiable_architecture(self, set_dependency):
         with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError):
             spec.normalize()
 
-    @pytest.mark.usefixtures('config')
     def test_invalid_dep(self):
         spec = Spec('libelf ^mpich')
         with pytest.raises(spack.spec.InvalidDependencyError):
@@ -602,7 +601,6 @@ def test_copy_normalized(self):
         copy_ids = set(id(s) for s in copy.traverse())
         assert not orig_ids.intersection(copy_ids)
 
-    @pytest.mark.usefixtures('config')
     def test_copy_concretized(self):
         orig = Spec('mpileaks')
         orig.concretize()
diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py
index 0edf7e6102..74790156ae 100644
--- a/lib/spack/spack/util/module_cmd.py
+++ b/lib/spack/spack/util/module_cmd.py
@@ -19,16 +19,11 @@
 # If we need another option that changes the environment, add it here.
 module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse']
 py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'"
-
-# This is just to enable testing. I hate it but we can't find a better way
-_test_mode = False
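+# Shell command template eval'd inside module(); tests monkeypatch it to
+# source a file directly instead of invoking the real module command.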
+_cmd_template = "'module ' + ' '.join(args) + ' 2>&1'"
 
 
 def module(*args):
-    module_cmd = 'module ' + ' '.join(args) + ' 2>&1'
-    if _test_mode:
-        tty.warn('module function operating in test mode')
-        module_cmd = ". %s 2>&1" % args[1]
+    module_cmd = eval(_cmd_template)  # So we can monkeypatch for testing
     if args[0] in module_change_commands:
         # Do the module manipulation, then output the environment in JSON
         # and read the JSON back in the parent process to update os.environ
-- 
GitLab