Skip to content
Snippets Groups Projects
Commit 43ca8052 authored by Todd Gamblin's avatar Todd Gamblin
Browse files

Factor out canonical_deptype function, cleanup spec.py

parent cd960caf
No related branches found
No related tags found
No related merge requests found
...@@ -123,6 +123,39 @@ ...@@ -123,6 +123,39 @@
from spack.version import * from spack.version import *
from spack.provider_index import ProviderIndex from spack.provider_index import ProviderIndex
# Explicit public API for `from spack.spec import *`: the Spec class,
# the dependency-type helpers and constants, the parsing entry points,
# and the module's full exception hierarchy.
__all__ = [
'Spec',
'alldeps',
'nolink',
'nobuild',
'canonical_deptype',
'validate_deptype',
'parse',
'parse_anonymous_spec',
'SpecError',
'SpecParseError',
'DuplicateDependencyError',
'DuplicateVariantError',
'DuplicateCompilerSpecError',
'UnsupportedCompilerError',
'UnknownVariantError',
'DuplicateArchitectureError',
'InconsistentSpecError',
'InvalidDependencyError',
'InvalidDependencyTypeError',
'NoProviderError',
'MultipleProviderError',
'UnsatisfiableSpecError',
'UnsatisfiableSpecNameError',
'UnsatisfiableVersionSpecError',
'UnsatisfiableCompilerSpecError',
'UnsatisfiableVariantSpecError',
'UnsatisfiableCompilerFlagSpecError',
'UnsatisfiableArchitectureSpecError',
'UnsatisfiableProviderSpecError',
'UnsatisfiableDependencySpecError',
'SpackYAMLError',
'AmbiguousHashError']
# Valid pattern for an identifier in Spack # Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*' identifier_re = r'\w[\w-]*'
...@@ -156,12 +189,45 @@ ...@@ -156,12 +189,45 @@
# Special named groups of dependency types.  Each base type is one of
# 'build', 'link', or 'run'; these tuples name common combinations.
alldeps = ('build', 'link', 'run')
nolink = ('build', 'run')
nobuild = ('link', 'run')
norun = ('link', 'build')

# Map from special group name to the tuple of base types it stands for.
special_types = {
    'alldeps': alldeps,
    'nolink': nolink,
    'nobuild': nobuild,
    'norun': norun,
}

# Every legal string deptype argument: group names plus base types.
legal_deps = tuple(special_types) + alldeps


def validate_deptype(deptype):
    """Raise InvalidDependencyTypeError if ``deptype`` is not legal.

    A legal deptype is a string in ``legal_deps`` ('build', 'link',
    'run', or a special group name), or a list/tuple of such strings.
    ``None`` is explicitly rejected.
    """
    if isinstance(deptype, str):
        if deptype not in legal_deps:
            raise InvalidDependencyTypeError(
                "Invalid dependency type: %s" % deptype)
    elif isinstance(deptype, (list, tuple)):
        for t in deptype:
            validate_deptype(t)
    elif deptype is None:
        raise InvalidDependencyTypeError("deptype cannot be None!")
    else:
        # Previously any other type (e.g. an int or a set) slipped
        # through silently; a validator must reject what it cannot check.
        raise InvalidDependencyTypeError(
            "Invalid dependency type: %s" % repr(deptype))


def canonical_deptype(deptype):
    """Convert a deptype argument to a canonical tuple of type names.

    ``None`` means all dependency types; a string may name a special
    group (e.g. 'nolink') or a single base type; a list/tuple is
    flattened by canonicalizing each element (duplicates preserved).

    Raises ValueError for any other argument type.
    """
    if deptype is None:
        return alldeps
    elif isinstance(deptype, str):
        return special_types.get(deptype, (deptype,))
    elif isinstance(deptype, (tuple, list)):
        # Concatenate the canonical form of each element.
        return sum((canonical_deptype(d) for d in deptype), ())

    # Previously unrecognized values were returned unchanged, silently
    # propagating a non-canonical deptype to callers.
    raise ValueError("Invalid dependency type: %s" % repr(deptype))
def colorize_spec(spec): def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in """Returns a spec colorized according to the colors specified in
...@@ -542,17 +608,8 @@ def get_dependency(self, name): ...@@ -542,17 +608,8 @@ def get_dependency(self, name):
raise InvalidDependencyException( raise InvalidDependencyException(
self.name + " does not depend on " + comma_or(name)) self.name + " does not depend on " + comma_or(name))
def _deptype_norm(self, deptype):
if deptype is None:
return alldeps
# Force deptype to be a set object so that we can do set intersections.
if isinstance(deptype, str):
# Support special deptypes.
return special_types.get(deptype, (deptype,))
return deptype
def _find_deps(self, where, deptype): def _find_deps(self, where, deptype):
deptype = self._deptype_norm(deptype) deptype = canonical_deptype(deptype)
return [dep.spec return [dep.spec
for dep in where.values() for dep in where.values()
...@@ -565,7 +622,7 @@ def dependents(self, deptype=None): ...@@ -565,7 +622,7 @@ def dependents(self, deptype=None):
return self._find_deps(self._dependents, deptype) return self._find_deps(self._dependents, deptype)
def _find_deps_dict(self, where, deptype): def _find_deps_dict(self, where, deptype):
deptype = self._deptype_norm(deptype) deptype = canonical_deptype(deptype)
return dict((dep.spec.name, dep) return dict((dep.spec.name, dep)
for dep in where.values() for dep in where.values()
...@@ -2718,6 +2775,10 @@ class InvalidDependencyError(SpecError): ...@@ -2718,6 +2775,10 @@ class InvalidDependencyError(SpecError):
of the package.""" of the package."""
class InvalidDependencyTypeError(SpecError):
    """Raised when a dependency type is not a legal Spack dep type.

    Legal deptypes are the base types 'build', 'link', and 'run', plus
    the special group names in ``special_types`` (see ``legal_deps``).
    """
class NoProviderError(SpecError): class NoProviderError(SpecError):
"""Raised when there is no package that provides a particular """Raised when there is no package that provides a particular
virtual dependency. virtual dependency.
...@@ -2804,8 +2865,6 @@ def __init__(self, provided, required): ...@@ -2804,8 +2865,6 @@ def __init__(self, provided, required):
# TODO: get rid of this and be more specific about particular incompatible # TODO: get rid of this and be more specific about particular incompatible
# dep constraints # dep constraints
class UnsatisfiableDependencySpecError(UnsatisfiableSpecError): class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
"""Raised when some dependency of constrained specs are incompatible""" """Raised when some dependency of constrained specs are incompatible"""
def __init__(self, provided, required): def __init__(self, provided, required):
......
...@@ -24,34 +24,34 @@ ...@@ -24,34 +24,34 @@
############################################################################## ##############################################################################
import unittest import unittest
import spack.spec import spack.spec as sp
from spack.parse import Token from spack.parse import Token
from spack.spec import * from spack.spec import *
# Sample output for a complex lexing.
complex_lex = [Token(sp.ID, 'mvapich_foo'),
               Token(sp.DEP),
               Token(sp.ID, '_openmpi'),
               Token(sp.AT),
               Token(sp.ID, '1.2'),
               Token(sp.COLON),
               Token(sp.ID, '1.4'),
               Token(sp.COMMA),
               Token(sp.ID, '1.6'),
               Token(sp.PCT),
               Token(sp.ID, 'intel'),
               Token(sp.AT),
               Token(sp.ID, '12.1'),
               Token(sp.COLON),
               Token(sp.ID, '12.6'),
               Token(sp.ON),
               Token(sp.ID, 'debug'),
               Token(sp.OFF),
               Token(sp.ID, 'qt_4'),
               Token(sp.DEP),
               Token(sp.ID, 'stackwalker'),
               Token(sp.AT),
               Token(sp.ID, '8.1_1e')]
class SpecSyntaxTest(unittest.TestCase): class SpecSyntaxTest(unittest.TestCase):
...@@ -74,16 +74,16 @@ def check_parse(self, expected, spec=None, remove_arch=True): ...@@ -74,16 +74,16 @@ def check_parse(self, expected, spec=None, remove_arch=True):
""" """
if spec is None: if spec is None:
spec = expected spec = expected
output = spack.spec.parse(spec) output = sp.parse(spec)
parsed = (" ".join(str(spec) for spec in output)) parsed = (" ".join(str(spec) for spec in output))
self.assertEqual(expected, parsed) self.assertEqual(expected, parsed)
def check_lex(self, tokens, spec): def check_lex(self, tokens, spec):
"""Check that the provided spec parses to the provided token list.""" """Check that the provided spec parses to the provided token list."""
lex_output = SpecLexer().lex(spec) lex_output = sp.SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output): for tok, spec_tok in zip(tokens, lex_output):
if tok.type == ID: if tok.type == sp.ID:
self.assertEqual(tok, spec_tok) self.assertEqual(tok, spec_tok)
else: else:
# Only check the type for non-identifiers. # Only check the type for non-identifiers.
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment