diff --git a/.flake8 b/.flake8
index a1e2fcc1f8abc35423366b1b9709834a1809097e..286522bc48a0ceada57f65d19eba527d12280608 100644
--- a/.flake8
+++ b/.flake8
@@ -8,6 +8,9 @@
 # - E221: multiple spaces before operator
 # - E241: multiple spaces after ‘,’
 #
+# Let people use terse Python features:
+# - E731 : lambda expressions
+#
 # Spack allows wildcard imports:
 # - F403: disable wildcard import
 #
@@ -16,5 +19,5 @@
 # - F999: name name be undefined or undefined from star imports.
 #
 [flake8]
-ignore = E221,E241,F403,F821,F999
+ignore = E221,E241,E731,F403,F821,F999,F405
 max-line-length = 79
diff --git a/bin/spack b/bin/spack
index 3544feb10ac42a2e3a15a123c027dee0935f7ea9..9b1276a866639e385b7378b4b08ddae8e5c9ece4 100755
--- a/bin/spack
+++ b/bin/spack
@@ -138,6 +138,9 @@ def main():
         import spack.util.debug as debug
         debug.register_interrupt_handler()
 
+    from spack.yaml_version_check import check_yaml_versions
+    check_yaml_versions()
+
     spack.spack_working_dir = working_dir
     if args.mock:
         from spack.repository import RepoPath
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 6efed836219d8ee9a02e564e2a90e24f82dc43c9..50c48b802bbea0881e04cf83f7d3630d080a1e56 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -102,8 +102,8 @@ that the packages is installed:
    ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
    ==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
    ######################################################################## 100.0%
-   ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
-   ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
+   ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
+   ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23.
    ==> No patches needed for mpileaks.
    ==> Building mpileaks.
 
@@ -132,10 +132,10 @@ sites, as installing a version that one user needs will not disrupt
 existing installations for other users.
 
 In addition to different versions, Spack can customize the compiler,
-compile-time options (variants), and platform (for cross compiles) of
-an installation.  Spack is unique in that it can also configure the
-*dependencies* a package is built with.  For example, two
-configurations of the same version of a package, one built with boost
+compile-time options (variants), compiler flags, and platform (for
+cross compiles) of an installation.  Spack is unique in that it can
+also configure the *dependencies* a package is built with.  For example,
+two configurations of the same version of a package, one built with boost
 1.39.0, and the other version built with version 1.43.0, can coexist.
 
 This can all be done on the command line using the *spec* syntax.
@@ -334,9 +334,15 @@ of libelf would look like this:
    -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
    libdwarf@20130729-d9b90962
 
+We can also search for packages that have a certain attribute. For example,
+``spack find -l libdwarf +debug`` will show only installations of libdwarf
+with the 'debug' compile-time option enabled, while ``spack find -l +debug``
+will find every installed package with a 'debug' compile-time option enabled.
+
 The full spec syntax is discussed in detail in :ref:`sec-specs`.
 
 
+
 Compiler configuration
 -----------------------------------
 
@@ -463,6 +469,26 @@ For compilers, like ``clang``, that do not support Fortran, put
 Once you save the file, the configured compilers will show up in the
 list displayed by ``spack compilers``.
 
+You can also add compiler flags to manually configured compilers. The
+valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``,
+``ldflags``, and ``ldlibs``. For example,::
+
+    ...
+    chaos_5_x86_64_ib:
+      ...
+      intel@15.0.0:
+          cc: /usr/local/bin/icc-15.0.024-beta
+          cxx: /usr/local/bin/icpc-15.0.024-beta
+          f77: /usr/local/bin/ifort-15.0.024-beta
+          fc: /usr/local/bin/ifort-15.0.024-beta
+          cppflags: -O3 -fPIC
+      ...
+
+These flags will be treated by spack as if they were entered from
+the command line each time this compiler is used. The compiler wrappers
+then inject those flags into the compiler command. Compiler flags
+entered from the command line will be discussed in more detail in the
+following section.
 
 .. _sec-specs:
 
@@ -480,7 +506,7 @@ the full syntax of specs.
 
 Here is an example of a much longer spec than we've seen thus far::
 
-   mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt =bgqos_0 ^callpath @1.1 %gcc@4.7.2
+   mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgqos_0 ^callpath @1.1 %gcc@4.7.2
 
 If provided to ``spack install``, this will install the ``mpileaks``
 library at some version between ``1.2`` and ``1.4`` (inclusive),
@@ -498,8 +524,12 @@ More formally, a spec consists of the following pieces:
 * ``%`` Optional compiler specifier, with an optional compiler version
   (``gcc`` or ``gcc@4.7.3``)
 * ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
-  ``-qt``, or ``~qt``)
-* ``=`` Optional architecture specifier (``bgqos_0``)
+  ``-qt``, or ``~qt``) for boolean variants
+* ``name=<value>`` Optional variant specifiers that are not restricted to
+  boolean variants
+* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
+  ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
+* ``arch=<value>`` Optional architecture specifier (``arch=bgqos_0``)
 * ``^`` Dependency specs (``^callpath@1.1``)
 
 There are two things to notice here.  The first is that specs are
@@ -579,7 +609,7 @@ compilers, variants, and architectures just like any other spec.
 Specifiers are associated with the nearest package name to their left.
 For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
 ``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
-``-qt``, and ``=bgqos_0`` all associate with the ``mpileaks`` package.
+``-qt``, and ``arch=bgqos_0`` all associate with the ``mpileaks`` package.
 
 In the diagram above, ``mpileaks`` depends on ``mpich`` with an
 unspecified version, but packages can depend on other packages with
@@ -635,22 +665,25 @@ based on site policies.
 Variants
 ~~~~~~~~~~~~~~~~~~~~~~~
 
-.. Note::
-
-   Variants are not yet supported, but will be in the next Spack
-   release (0.9), due in Q2 2015.
-
-Variants are named options associated with a particular package, and
-they can be turned on or off.  For example, above, supplying
-``+debug`` causes ``mpileaks`` to be built with debug flags.  The
-names of particular variants available for a package depend on what
-was provided by the package author.  ``spack info <package>`` will
+Variants are named options associated with a particular package. They are
+optional, as each package must provide default values for each variant it
+makes available. Variants can be specified using
+a flexible parameter syntax ``name=<value>``. For example,
+``spack install libelf debug=True`` will install libelf build with debug
+flags. The names of particular variants available for a package depend on
+what was provided by the package author. ``spack info <package>`` will
 provide information on what build variants are available.
 
-Depending on the package a variant may be on or off by default.  For
-``mpileaks`` here, ``debug`` is off by default, and we turned it on
-with ``+debug``.  If a package is on by default you can turn it off by
-either adding ``-name`` or ``~name`` to the spec.
+For compatibility with earlier versions, variants which happen to be
+boolean in nature can be specified by a syntax that represents turning
+options on and off. For example, in the previous spec we could have
+supplied ``libelf +debug`` with the same effect of enabling the debug
+compile time option for the libelf package.
+
+Depending on the package a variant may have any default value.  For
+``libelf`` here, ``debug`` is ``False`` by default, and we turned it on
+with ``debug=True`` or ``+debug``.  If a package is ``True`` by default
+you can turn it off by either adding ``-name`` or ``~name`` to the spec.
 
 There are two syntaxes here because, depending on context, ``~`` and
 ``-`` may mean different things.  In most shells, the following will
@@ -662,7 +695,7 @@ result in the shell performing home directory substitution:
    mpileaks~debug    # use this instead
 
 If there is a user called ``debug``, the ``~`` will be incorrectly
-expanded.  In this situation, you would want to write ``mpileaks
+expanded.  In this situation, you would want to write ``libelf
 -debug``.  However, ``-`` can be ambiguous when included after a
 package name without spaces:
 
@@ -677,12 +710,35 @@ package, not a request for ``mpileaks`` built without ``debug``
 options.  In this scenario, you should write ``mpileaks~debug`` to
 avoid ambiguity.
 
-When spack normalizes specs, it prints them out with no spaces and
-uses only ``~`` for disabled variants.  We allow ``-`` and spaces on
-the command line is provided for convenience and legibility.
+When spack normalizes specs, it prints them out with no spaces, prints
+boolean variants using the backwards-compatible syntax, and uses only
+``~`` for disabled boolean variants.  The ``-`` and spaces are allowed
+on the command line for convenience and legibility.
+
 
+Compiler Flags
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Compiler flags are specified using the same syntax as non-boolean variants,
+but fulfill a different purpose. While the function of a variant is set by
+the package, compiler flags are used by the compiler wrappers to inject
+flags into the compile line of the build. Additionally, compiler flags are
+inherited by dependencies. ``spack install libdwarf cppflags=\"-g\"`` will
+install both libdwarf and libelf with the ``-g`` flag injected into their
+compile line.
+
+Notice that the value of the compiler flags must be escape quoted on the
+command line. From within python files, the same spec would be specified
+``libdwarf cppflags="-g"``. This is necessary because of how the shell
+handles the quote symbols.
 
-Architecture specifier
+The six compiler flags are injected in the order of implicit make commands
+in gnu autotools. If all flags are set, the order is
+``$cppflags $cflags|$cxxflags $ldflags command $ldlibs`` for C and C++ and
+``$fflags $cppflags $ldflags command $ldlibs`` for fortran.
+
+
+Architecture specifiers
 ~~~~~~~~~~~~~~~~~~~~~~~
 
 .. Note::
@@ -690,12 +746,9 @@ Architecture specifier
    Architecture specifiers are part of specs but are not yet
    functional. They will be in Spack version 1.0, due in Q3 2015.
 
-The architecture specifier starts with a ``=`` and also comes after
-some package name within a spec.  It allows a user to specify a
-particular architecture for the package to be built.  This is mostly
-used for architectures that need cross-compilation, and in most cases,
-users will not need to specify the architecture when they install a
-package.
+The architecture specifier looks identical to a variant specifier for a
+non-boolean variant. The architecture can be specified only using the
+reserved name ``arch`` (``arch=bgqos_0``).
 
 
 .. _sec-virtual-dependencies:
@@ -773,6 +826,23 @@ any MPI implementation will do.  If another package depends on
 error.  Likewise, if you try to plug in some package that doesn't
 provide MPI, Spack will raise an error.
 
+Specifying Specs by Hash
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Complicated specs can become cumbersome to enter on the command line,
+especially when many of the qualifications are necessary to
+distinguish between similar installs, for example when using the
+``uninstall`` command. To avoid this, when referencing an existing spec,
+Spack allows you to reference specs by their hash. We previously
+discussed the spec hash that Spack computes. In place of a spec in any
+command, substitute ``/<hash>`` where ``<hash>`` is any amount from
+the beginning of a spec hash. If the given spec hash is sufficient
+to be unique, Spack will replace the reference with the spec to which
+it refers. Otherwise, it will prompt for a more qualified hash.
+
+Note that this will not work to reinstall a dependency uninstalled by
+``spack uninstall -f``.
+
 .. _spack-providers:
 
 ``spack providers``
@@ -1002,8 +1072,8 @@ than one installed package matches it), then Spack will warn you:
 
    $ spack load libelf
    ==> Error: Multiple matches for spec libelf.  Choose one:
-   libelf@0.8.13%gcc@4.4.7=chaos_5_x86_64_ib
-   libelf@0.8.13%intel@15.0.0=chaos_5_x86_64_ib
+   libelf@0.8.13%gcc@4.4.7 arch=chaos_5_x86_64_ib
+   libelf@0.8.13%intel@15.0.0 arch=chaos_5_x86_64_ib
 
 You can either type the ``spack load`` command again with a fully
 qualified argument, or you can add just enough extra constraints to
@@ -1251,6 +1321,120 @@ regenerate all module and dotkit files from scratch:
 
 .. _extensions:
 
+Filesystem Views
+-------------------------------
+
+.. Maybe this is not the right location for this documentation.
+
+The Spack installation area allows for many package installation trees
+to coexist and gives the user choices as to what versions and variants
+of packages to use.  To use them, the user must rely on a way to
+aggregate a subset of those packages.  The section on Environment
+Modules gives one good way to do that which relies on setting various
+environment variables.  An alternative way to aggregate is through
+**filesystem views**.
+
+A filesystem view is a single directory tree which is the union of the
+directory hierarchies of the individual package installation trees
+that have been included.  The files of the view's installed packages
+are brought into the view by symbolic or hard links back to their
+location in the original Spack installation area.  As the view is
+formed, any clashes due to a file having the exact same path in its
+package installation tree are handled in a first-come-first-served
+basis and a warning is printed.  Packages and their dependencies can
+be both added and removed.  During removal, empty directories will be
+purged.  These operations can be limited to pertain to just the
+packages listed by the user or to exclude specific dependencies and
+they allow for software installed outside of Spack to coexist inside
+the filesystem view tree.
+
+By its nature, a filesystem view represents a particular choice of one
+set of packages among all the versions and variants that are available
+in the Spack installation area.  It is thus equivalent to the
+directory hierarchy that might exist under ``/usr/local``.  While this
+limits a view to including only one version/variant of any package, it
+provides the benefits of having a simpler and traditional layout which
+may be used without any particular knowledge that its packages were
+built by Spack.
+
+Views can be used for a variety of purposes including:
+
+- A central installation in a traditional layout, eg ``/usr/local`` maintained over time by the sysadmin.
+- A self-contained installation area which may form the basis of a top-level atomic versioning scheme, eg ``/opt/pro`` vs ``/opt/dev``.
+- Providing an atomic and monolithic binary distribution, eg for delivery as a single tarball.
+- Producing ephemeral testing or developing environments.
+
+Using Filesystem Views
+~~~~~~~~~~~~~~~~~~~~~~
+
+A filesystem view is created and packages are linked in by the ``spack
+view`` command's ``symlink`` and ``hardlink`` sub-commands.  The
+``spack view remove`` command can be used to unlink some or all of the
+filesystem view.
+
+The following example creates a filesystem view based
+on an installed ``cmake`` package and then removes from the view the
+files in the ``cmake`` package while retaining its dependencies.
+
+.. code-block:: sh
+
+    
+    $ spack view -v symlink myview cmake@3.5.2
+    ==> Linking package: "ncurses"
+    ==> Linking package: "zlib"
+    ==> Linking package: "openssl"
+    ==> Linking package: "cmake"
+    
+    $ ls myview/
+    bin  doc  etc  include  lib  share
+
+    $ ls myview/bin/
+    captoinfo  clear  cpack     ctest    infotocap        openssl  tabs  toe   tset
+    ccmake     cmake  c_rehash  infocmp  ncurses6-config  reset    tic   tput
+    
+    $ spack view -v -d false rm myview cmake@3.5.2
+    ==> Removing package: "cmake"
+    
+    $ ls myview/bin/
+    captoinfo  c_rehash  infotocap        openssl  tabs  toe   tset
+    clear      infocmp   ncurses6-config  reset    tic   tput
+
+
+Limitations of Filesystem Views
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This section describes some limitations that should be considered in
+using filesystems views.  
+
+Filesystem views are merely organizational.  The binary executable
+programs, shared libraries and other build products found in a view
+are mere links into the "real" Spack installation area.  If a view is
+built with symbolic links it requires the Spack-installed package to
+be kept in place.  Building a view with hardlinks removes this
+requirement but any internal paths (eg, rpath or ``#!`` interpreter
+specifications) will still require the Spack-installed package files
+to be in place.
+
+.. FIXME: reference the relocation work of Hegner and Gartung.
+
+As described above, when a view is built only a single instance of a
+file may exist in the unified filesystem tree.  If more than one
+package provides a file at the same path (relative to its own root)
+then it is the first package added to the view that "wins".  A warning
+is printed and it is up to the user to determine if the conflict
+matters.
+
+It is up to the user to assure a consistent view is produced.  In
+particular if the user excludes packages, limits the following of
+dependencies or removes packages the view may become inconsistent.  In
+particular, if two packages require the same sub-tree of dependencies,
+removing one package (recursively) will remove its dependencies and
+leave the other package broken.
+
+
+
+
+
 Extensions & Python support
 ------------------------------------
 
@@ -1282,7 +1466,7 @@ You can find extensions for your Python installation like this:
 .. code-block:: sh
 
    $ spack extensions python
-   ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+   ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
    ==> 36 extensions:
    geos          py-ipython     py-pexpect    py-pyside            py-sip
    py-basemap    py-libxml2     py-pil        py-pytz              py-six
@@ -1372,9 +1556,9 @@ installation:
 .. code-block:: sh
 
    $ spack activate py-numpy
-   ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
-   ==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
-   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+   ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
+   ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
+   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
 
 Several things have happened here.  The user requested that
 ``py-numpy`` be activated in the ``python`` installation it was built
@@ -1389,7 +1573,7 @@ packages listed as activated:
 .. code-block:: sh
 
    $ spack extensions python
-   ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+   ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
    ==> 36 extensions:
    geos          py-ipython     py-pexpect    py-pyside            py-sip
    py-basemap    py-libxml2     py-pil        py-pytz              py-six
@@ -1437,7 +1621,7 @@ dependencies, you can use ``spack activate -f``:
 .. code-block:: sh
 
    $ spack activate -f py-numpy
-   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
 
 .. _spack-deactivate:
 
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
index c0b79a7f4448c52be4734d837e142d583b9552d8..c613071c65bf089ec07a7eab391eeb408bd4fd82 100644
--- a/lib/spack/docs/configuration.rst
+++ b/lib/spack/docs/configuration.rst
@@ -70,9 +70,9 @@ directory. Here's an example of an external configuration:
    packages:
       openmpi:
          paths:
-            openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
-            openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
-            openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+            openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
+            openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
+            openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
 
 This example lists three installations of OpenMPI, one built with gcc,
 one built with gcc and debug information, and another built with Intel.
@@ -108,9 +108,9 @@ be:
   packages:
     openmpi:
       paths:
-        openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
-        openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
-        openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+        openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
+        openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
+        openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
       buildable: False
 
 The addition of the ``buildable`` flag tells Spack that it should never build
diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst
index 0998ba8da4d89005620a5cc246b4f9b8412bcad4..27a3b4b43543f654ca003b66c4247f7072621352 100644
--- a/lib/spack/docs/features.rst
+++ b/lib/spack/docs/features.rst
@@ -31,14 +31,21 @@ platform, all on the command line.
    # Specify a compiler (and its version), with %
    $ spack install mpileaks@1.1.2 %gcc@4.7.3
 
-   # Add special compile-time options with +
+   # Add special compile-time options by name
+   $ spack install mpileaks@1.1.2 %gcc@4.7.3 debug=True
+
+   # Add special boolean compile-time options with +
    $ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug
 
-   # Cross-compile for a different architecture with =
-   $ spack install mpileaks@1.1.2 =bgqos_0
+   # Add compiler flags using the conventional names
+   $ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags=\"-O3 -floop-block\"
+
+   # Cross-compile for a different architecture with arch=
+   $ spack install mpileaks@1.1.2 arch=bgqos_0
 
-Users can specify as many or few options as they care about.  Spack
-will fill in the unspecified values with sensible defaults.
+Users can specify as many or few options as they care about. Spack
+will fill in the unspecified values with sensible defaults. The two listed
+syntaxes for variants are identical when the value is boolean.
 
 
 Customize dependencies
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 63c411ffb58e1f3c7c3769c12e5c1d2304132b44..1f83f611b0611d4fa9e5d7dfc16a82aeab39836b 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1221,11 +1221,13 @@ just as easily provide a version range:
 
    depends_on("libelf@0.8.2:0.8.4:")
 
-Or a requirement for a particular variant:
+Or a requirement for a particular variant or compiler flags:
 
 .. code-block:: python
 
    depends_on("libelf@0.8+debug")
+   depends_on('libelf debug=True')
+   depends_on('libelf cppflags="-fPIC"')
 
 Both users *and* package authors can use the same spec syntax to refer
 to different package configurations.  Users use the spec syntax on the
@@ -1623,21 +1625,21 @@ the user runs ``spack install`` and the time the ``install()`` method
 is called.  The concretized version of the spec above might look like
 this::
 
-   mpileaks@2.3%gcc@4.7.3=linux-ppc64
-       ^callpath@1.0%gcc@4.7.3+debug=linux-ppc64
-           ^dyninst@8.1.2%gcc@4.7.3=linux-ppc64
-               ^libdwarf@20130729%gcc@4.7.3=linux-ppc64
-                   ^libelf@0.8.11%gcc@4.7.3=linux-ppc64
-           ^mpich@3.0.4%gcc@4.7.3=linux-ppc64
+   mpileaks@2.3%gcc@4.7.3 arch=linux-ppc64
+       ^callpath@1.0%gcc@4.7.3+debug arch=linux-ppc64
+           ^dyninst@8.1.2%gcc@4.7.3 arch=linux-ppc64
+               ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
+                   ^libelf@0.8.11%gcc@4.7.3 arch=linux-ppc64
+           ^mpich@3.0.4%gcc@4.7.3 arch=linux-ppc64
 
 .. graphviz::
 
    digraph {
-       "mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
-       "mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
-       "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64"
-       "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
-       "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
+       "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
+       "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
+       "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64"
+       "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
+       "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
    }
 
 Here, all versions, compilers, and platforms are filled in, and there
@@ -1666,9 +1668,9 @@ running ``spack spec``.  For example:
        ^libdwarf
            ^libelf
 
-   dyninst@8.0.1%gcc@4.7.3=linux-ppc64
-       ^libdwarf@20130729%gcc@4.7.3=linux-ppc64
-           ^libelf@0.8.13%gcc@4.7.3=linux-ppc64
+   dyninst@8.0.1%gcc@4.7.3 arch=linux-ppc64
+       ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
+           ^libelf@0.8.13%gcc@4.7.3 arch=linux-ppc64
 
 This is useful when you want to know exactly what Spack will do when
 you ask for a particular spec.
@@ -1908,6 +1910,12 @@ the command line.
     ``$rpath_flag`` can be overriden on a compiler specific basis in
     ``lib/spack/spack/compilers/$compiler.py``.
 
+The compiler wrappers also pass the compiler flags specified by the user from
+the command line (``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``,
+and/or ``ldlibs``). They do not override the canonical autotools flags with the
+same names (but in ALL-CAPS) that may be passed into the build by particularly
+challenging package scripts.
+
 Compiler flags
 ~~~~~~~~~~~~~~
 In rare circumstances such as compiling and running small unit tests, a package
@@ -2154,12 +2162,12 @@ example:
        def install(self, prefix):
            # Do default install
 
-       @when('=chaos_5_x86_64_ib')
+       @when('arch=chaos_5_x86_64_ib')
        def install(self, prefix):
            # This will be executed instead of the default install if
            # the package's sys_type() is chaos_5_x86_64_ib.
 
-       @when('=bgqos_0")
+       @when('arch=bgqos_0')
        def install(self, prefix):
            # This will be executed if the package's sys_type is bgqos_0
 
@@ -2749,11 +2757,11 @@ build it:
    $ spack stage libelf
    ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
    ######################################################################## 100.0%
-   ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13.tar.gz
-   ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64.
+   ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13.tar.gz
+   ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64.
    $ spack cd libelf
    $ pwd
-   /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13
+   /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13
 
 ``spack cd`` here changed he current working directory to the
 directory containing the expanded ``libelf`` source code.  There are a
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 9758b74f37653b3b231111205ae791a2494019af..bf98b4c35400c36d6f822394583ba4d8c129a682 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -174,6 +174,28 @@ if [[ -z $command ]]; then
     die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
 fi
 
+#
+# Filter '.' and Spack environment directories out of PATH so that
+# this script doesn't just call itself
+#
+IFS=':' read -ra env_path <<< "$PATH"
+IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
+spack_env_dirs+=("" ".")
+PATH=""
+for dir in "${env_path[@]}"; do
+    addpath=true
+    for env_dir in "${spack_env_dirs[@]}"; do
+        if [[ $dir == $env_dir ]]; then
+            addpath=false
+            break
+        fi
+    done
+    if $addpath; then
+        PATH="${PATH:+$PATH:}$dir"
+    fi
+done
+export PATH
+
 if [[ $mode == vcheck ]]; then
     exec ${command} "$@"
 fi
@@ -286,28 +308,6 @@ unset LD_LIBRARY_PATH
 unset LD_RUN_PATH
 unset DYLD_LIBRARY_PATH
 
-#
-# Filter '.' and Spack environment directories out of PATH so that
-# this script doesn't just call itself
-#
-IFS=':' read -ra env_path <<< "$PATH"
-IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
-spack_env_dirs+=("" ".")
-PATH=""
-for dir in "${env_path[@]}"; do
-    addpath=true
-    for env_dir in "${spack_env_dirs[@]}"; do
-        if [[ $dir == $env_dir ]]; then
-            addpath=false
-            break
-        fi
-    done
-    if $addpath; then
-        PATH="${PATH:+$PATH:}$dir"
-    fi
-done
-export PATH
-
 full_command=("$command" "${args[@]}")
 
 # In test command mode, write out full command for Spack tests.
@@ -324,8 +324,8 @@ fi
 if [[ $SPACK_DEBUG == TRUE ]]; then
     input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
     output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
-    echo "[$mode] $command $input_command" >> $input_log
-    echo "[$mode] ${full_command[@]}" >> $output_log
+    echo "[$mode] $command $input_command" >> "$input_log"
+    echo "[$mode] ${full_command[@]}" >> "$output_log"
 fi
 
 exec "${full_command[@]}"
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index c638b113fdf37f94874f0460a0154b2a9490dafc..ee81e11a20737a2c6ee904a0c35e85770ef277ba 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -64,12 +64,14 @@ def info(message, *args, **kwargs):
     format = kwargs.get('format', '*b')
     stream = kwargs.get('stream', sys.stdout)
     wrap   = kwargs.get('wrap', False)
+    break_long_words = kwargs.get('break_long_words', False)
 
     cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
     for arg in args:
         if wrap:
             lines = textwrap.wrap(
-                str(arg), initial_indent=indent, subsequent_indent=indent)
+                str(arg), initial_indent=indent, subsequent_indent=indent,
+                break_long_words=break_long_words)
             for line in lines:
                 stream.write(line + '\n')
         else:
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 429ba458826c341071eab0408538f05166480e4e..81a83691d7a9cccedc27da1e8b2c19bc4375d29b 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -198,8 +198,13 @@ def colify(elts, **options):
         for col in xrange(cols):
             elt = col * rows + row
             width = config.widths[col] + cextra(elts[elt])
-            fmt = '%%-%ds' % width
-            output.write(fmt % elts[elt])
+            if col < cols - 1:
+                fmt = '%%-%ds' % width
+                output.write(fmt % elts[elt])
+            else:
+                # Don't pad the rightmost column (spaces can wrap on
+                # small terminals if one line is overlong)
+                output.write(elts[elt])
 
         output.write("\n")
         row += 1
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 927e4c1422f6199b6f4cef7f9e39afc49eb9ac9a..965e3a7f786b7d6769fdc1fbd6d14d04fa980440 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -39,7 +39,9 @@
 lib_path       = join_path(spack_root, "lib", "spack")
 build_env_path = join_path(lib_path, "env")
 module_path    = join_path(lib_path, "spack")
+platform_path  = join_path(module_path, 'platforms')
 compilers_path = join_path(module_path, "compilers")
+operating_system_path = join_path(module_path, 'operating_systems')
 test_path      = join_path(module_path, "test")
 hooks_path     = join_path(module_path, "hooks")
 var_path       = join_path(spack_root, "var", "spack")
@@ -109,7 +111,7 @@
 
 # Version information
 from spack.version import Version
-spack_version = Version("0.9")
+spack_version = Version("0.9.1")
 
 #
 # Executables used by Spack
diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py
index 91d1d2003d2f98e5392afe3e8196f1374d23c222..38cff62af4902ef1ac74c0ba6902c3a514055931 100644
--- a/lib/spack/spack/abi.py
+++ b/lib/spack/spack/abi.py
@@ -35,8 +35,9 @@ class ABI(object):
        The current implementation is rather rough and could be improved."""
 
     def architecture_compatible(self, parent, child):
-        """Returns true iff the parent and child specs have ABI compatible architectures."""
-        return not parent.architecture or not child.architecture or parent.architecture == child.architecture
+        """Returns true iff the parent and child specs have ABI compatible targets."""
+        return not parent.architecture or not child.architecture \
+                        or parent.architecture == child.architecture
 
 
     @memoized
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index b14cb2bea24e6054d19bc72ecbaddd57842c6e8e..cbac7b41d65419f7c4ab8ba01391348e5e09e2f6 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -22,68 +22,498 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+"""
+This module contains all the elements that are required to create an
+architecture object. These include, the target processor, the operating system,
+and the architecture platform (e.g. cray, darwin, linux, bgq, etc.) classes.
+
+On a multiple architecture machine, the architecture spec field can be set to
+build a package against any target and operating system that is present on the
+platform. On Cray platforms or any other architecture that has different front
+and back end environments, the operating system will determine the method of
+compiler
+detection.
+
+There are two different types of compiler detection:
+    1. Through the $PATH env variable (front-end detection)
+    2. Through the tcl module system. (back-end detection)
+
+Depending on which operating system is specified, the compiler will be detected
+using one of those methods.
+
+For platforms such as linux and darwin, the operating system is autodetected
+and the target is set to be x86_64.
+
+The command line syntax for specifying an architecture is as follows:
+
+    target=<Target name> os=<OperatingSystem name>
+
+If the user wishes to use the defaults, either target or os can be left out of
+the command line and Spack will concretize using the default. These defaults
+are set in the 'platforms/' directory which contains the different subclasses
+for platforms. If the machine has multiple architectures, the user can
+also enter front-end, or fe or back-end or be. These settings will concretize
+to their respective front-end and back-end targets and operating systems.
+Additional platforms can be added by creating a subclass of Platform
+and adding it inside the platform directory.
+
+Platforms are an abstract class that are extended by subclasses. If the user
+wants to add a new type of platform (such as cray_xe), they can create a
+subclass and set all the class attributes such as priority, front_target,
+back_target, front_os, back_os. Platforms also contain a priority class
+attribute. A lower number signifies higher priority. These numbers are
+arbitrarily set and can be changed though often there isn't much need unless a
+new platform is added and the user wants that to be detected first.
+
+Targets are created inside the platform subclasses. Most architecture
+(like linux, and darwin) will have only one target (x86_64) but in the case of
+Cray machines, there is both a frontend and backend processor. The user can
+specify which targets are present on front-end and back-end architecture
+
+Depending on the platform, operating systems are either auto-detected or are
+set. The user can set the front-end and back-end operating setting by the class
+attributes front_os and back_os. The operating system as described earlier,
+will be responsible for compiler detection.
+"""
 import os
-import re
-import platform
+import imp
+import inspect
 
-from llnl.util.lang import memoized
+from llnl.util.lang import memoized, list_modules, key_ordering
+from llnl.util.filesystem import join_path
+import llnl.util.tty as tty
 
 import spack
+import spack.compilers
+from spack.util.naming import mod_to_class
+from spack.util.environment import get_path
+from spack.util.multiproc import parmap
 import spack.error as serr
 
 
 class InvalidSysTypeError(serr.SpackError):
     def __init__(self, sys_type):
-        super(InvalidSysTypeError,
-              self).__init__("Invalid sys_type value for Spack: " + sys_type)
+        super(InvalidSysTypeError, self).__init__(
+            "Invalid sys_type value for Spack: " + sys_type)
 
 
 class NoSysTypeError(serr.SpackError):
     def __init__(self):
-        super(NoSysTypeError,
-              self).__init__("Could not determine sys_type for this machine.")
+        super(NoSysTypeError, self).__init__(
+            "Could not determine sys_type for this machine.")
+
+
+@key_ordering
+class Target(object):
+    """ Target is the processor of the host machine.
+        The host machine may have different front-end and back-end targets,
+        especially if it is a Cray machine. The target will have a name and
+        also the module_name (e.g craype-compiler). Targets will also
+        recognize which platform they came from using the set_platform method.
+        Targets will have compiler finding strategies
+    """
+
+    def __init__(self, name, module_name=None):
+        self.name = name  # case of cray "ivybridge" but if it's x86_64
+        self.module_name = module_name  # craype-ivybridge
+
+    # Sets only the platform name to avoid recursiveness
+
+    def _cmp_key(self):
+        return (self.name, self.module_name)
+
+    def __repr__(self):
+        return self.__str__()
+
+    def __str__(self):
+        return self.name
+
+
+@key_ordering
+class Platform(object):
+    """ Abstract class that each type of Platform will subclass.
+        Will return an instance of it once it
+        is returned
+    """
+
+    priority        = None  # Subclass sets number. Controls detection order
+    front_end       = None
+    back_end        = None
+    default         = None  # The default back end target. On cray ivybridge
+
+    front_os        = None
+    back_os         = None
+    default_os      = None
+
+    def __init__(self, name):
+        self.targets = {}
+        self.operating_sys = {}
+        self.name = name
+
+    def add_target(self, name, target):
+        """Used by the platform specific subclass to list available targets.
+        Raises an error if the platform specifies a name
+        that is reserved by spack as an alias.
+        """
+        if name in ['frontend', 'fe', 'backend', 'be', 'default_target']:
+            raise ValueError(
+                "%s is a spack reserved alias "
+                "and cannot be the name of a target" % name)
+        self.targets[name] = target
+
+    def target(self, name):
+        """This is a getter method for the target dictionary
+        that handles defaulting based on the values provided by default,
+        front-end, and back-end. This can be overwritten
+        by a subclass for which we want to provide further aliasing options.
+        """
+        if name == 'default_target':
+            name = self.default
+        elif name == 'frontend' or name == 'fe':
+            name = self.front_end
+        elif name == 'backend' or name == 'be':
+            name = self.back_end
+
+        return self.targets.get(name, None)
+
+    def add_operating_system(self, name, os_class):
+        """ Add the operating_system class object into the
+            platform.operating_sys dictionary
+        """
+        if name in ['frontend', 'fe', 'backend', 'be', 'default_os']:
+            raise ValueError(
+                "%s is a spack reserved alias "
+                "and cannot be the name of an OS" % name)
+        self.operating_sys[name] = os_class
+
+    def operating_system(self, name):
+        if name == 'default_os':
+            name = self.default_os
+        if name == 'frontend' or name == "fe":
+            name = self.front_os
+        if name == 'backend' or name == 'be':
+            name = self.back_os
+
+        return self.operating_sys.get(name, None)
+
+
+    @classmethod
+    def detect(self):
+        """ Subclass is responsible for implementing this method.
+            Returns True if the Platform class detects that
+            it is the current platform
+            and False if it's not.
+        """
+        raise NotImplementedError()
+
+
+    def __repr__(self):
+        return self.__str__()
+
+
+    def __str__(self):
+        return self.name
+
+
+    def _cmp_key(self):
+        t_keys = ''.join(str(t._cmp_key()) for t in
+                         sorted(self.targets.values()))
+        o_keys = ''.join(str(o._cmp_key()) for o in
+                         sorted(self.operating_sys.values()))
+        return (self.name,
+                self.default,
+                self.front_end,
+                self.back_end,
+                self.default_os,
+                self.front_os,
+                self.back_os,
+                t_keys,
+                o_keys)
+
+
+@key_ordering
+class OperatingSystem(object):
+    """ Operating System will be like a class similar to platform extended
+        by subclasses for the specifics. Operating System will contain the
+        compiler finding logic. Instead of calling two separate methods to
+        find compilers we call find_compilers method for each operating system
+    """
+
+    def __init__(self, name, version):
+        self.name = name
+        self.version = version
+
+    def __str__(self):
+        return self.name + self.version
+
+    def __repr__(self):
+        return self.__str__()
+
+    def _cmp_key(self):
+        return (self.name, self.version)
 
+    def find_compilers(self, *paths):
+        """
+        Return a list of compilers found in the supplied paths.
+        This invokes the find() method for each Compiler class,
+        and appends the compilers detected to a list.
+        """
+        if not paths:
+            paths = get_path('PATH')
+        # Make sure path elements exist, and include /bin directories
+        # under prefixes.
+        filtered_path = []
+        for p in paths:
+            # Eliminate symlinks and just take the real directories.
+            p = os.path.realpath(p)
+            if not os.path.isdir(p):
+                continue
+            filtered_path.append(p)
 
-def get_sys_type_from_spack_globals():
-    """Return the SYS_TYPE from spack globals, or None if it isn't set."""
-    if not hasattr(spack, "sys_type"):
-        return None
-    elif hasattr(spack.sys_type, "__call__"):
-        return spack.sys_type()
+            # Check for a bin directory, add it if it exists
+            bin = join_path(p, 'bin')
+            if os.path.isdir(bin):
+                filtered_path.append(os.path.realpath(bin))
+
+        # Once the paths are cleaned up, do a search for each type of
+        # compiler.  We can spawn a bunch of parallel searches to reduce
+        # the overhead of spelunking all these directories.
+        types = spack.compilers.all_compiler_types()
+        compiler_lists = parmap(lambda cmp_cls:
+                                self.find_compiler(cmp_cls, *filtered_path),
+                                types)
+
+        # ensure all the version calls we made are cached in the parent
+        # process, as well.  This speeds up Spack a lot.
+        clist = reduce(lambda x, y: x+y, compiler_lists)
+        return clist
+
+    def find_compiler(self, cmp_cls, *path):
+        """Try to find the given type of compiler in the user's
+           environment. For each set of compilers found, this returns
+           compiler objects with the cc, cxx, f77, fc paths and the
+           version filled in.
+
+           This will search for compilers with the names in cc_names,
+           cxx_names, etc. and it will group them if they have common
+           prefixes, suffixes, and versions.  e.g., gcc-mp-4.7 would
+           be grouped with g++-mp-4.7 and gfortran-mp-4.7.
+        """
+        dicts = parmap(
+            lambda t: cmp_cls._find_matches_in_path(*t),
+            [(cmp_cls.cc_names,  cmp_cls.cc_version)  + tuple(path),
+             (cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
+             (cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
+             (cmp_cls.fc_names,  cmp_cls.fc_version)  + tuple(path)])
+
+        all_keys = set()
+        for d in dicts:
+            all_keys.update(d)
+
+        compilers = {}
+        for k in all_keys:
+            ver, pre, suf = k
+
+            # Skip compilers with unknown version.
+            if ver == 'unknown':
+                continue
+
+            paths = tuple(pn[k] if k in pn else None for pn in dicts)
+            spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
+
+            if ver in compilers:
+                prev = compilers[ver]
+
+                # prefer the one with more compilers.
+                prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
+                newcount = len([p for p in paths       if p is not None])
+                prevcount = len([p for p in prev_paths if p is not None])
+
+                # Don't add if it's not an improvement over prev compiler.
+                if newcount <= prevcount:
+                    continue
+
+            compilers[ver] = cmp_cls(spec, self, paths)
+
+        return list(compilers.values())
+
+    def to_dict(self):
+        d = {}
+        d['name'] = self.name
+        d['version'] = self.version
+        return d
+
+@key_ordering
+class Arch(object):
+    "Architecture is now a class to help with setting attributes"
+
+    def __init__(self, platform=None, platform_os=None, target=None):
+        self.platform = platform
+        if platform and platform_os:
+                platform_os = self.platform.operating_system(platform_os)
+        self.platform_os = platform_os
+        if platform and target:
+            target = self.platform.target(target)
+        self.target = target
+
+        # Hooks for parser to use when platform is set after target or os
+        self.target_string = None
+        self.os_string = None
+
+    @property
+    def concrete(self):
+        return all((self.platform is not None,
+                    isinstance(self.platform, Platform),
+                    self.platform_os is not None,
+                    isinstance(self.platform_os, OperatingSystem),
+                    self.target is not None, isinstance(self.target, Target)))
+
+    def __str__(self):
+        if self.platform or self.platform_os or self.target:
+            if self.platform.name == 'darwin':
+                os_name = self.platform_os.name if self.platform_os else "None"
+            else:
+                os_name = str(self.platform_os)
+
+            return (str(self.platform) + "-" +
+                    os_name + "-" + str(self.target))
+        else:
+            return ''
+
+
+    def __contains__(self, string):
+        return string in str(self)
+
+
+    def _cmp_key(self):
+        if isinstance(self.platform, Platform):
+            platform = self.platform.name
+        else:
+            platform = self.platform
+        if isinstance(self.platform_os, OperatingSystem):
+            platform_os = self.platform_os.name
+        else:
+            platform_os = self.platform_os
+        if isinstance(self.target, Target):
+            target = self.target.name
+        else:
+            target = self.target
+        return (platform, platform_os, target)
+
+    def to_dict(self):
+        d = {}
+        d['platform'] = str(self.platform) if self.platform else None
+        d['platform_os'] = str(self.platform_os) if self.platform_os else None
+        d['target'] = str(self.target) if self.target else None
+
+        return d
+
+
+def _target_from_dict(target_name, platform=None):
+    """ Creates new instance of target and assigns all the attributes of
+        that target from the dictionary
+    """
+    if not platform:
+        platform = sys_type()
+    return platform.target(target_name)
+
+
+def _operating_system_from_dict(os_name, platform=None):
+    """ uses platform's operating system method to grab the constructed
+        operating systems that are valid on the platform.
+    """
+    if not platform:
+        platform = sys_type()
+    if isinstance(os_name, dict):
+        name = os_name['name']
+        version = os_name['version']
+        return platform.operating_system(name+version)
     else:
-        return spack.sys_type
+        return platform.operating_system(os_name)
+
 
+def _platform_from_dict(platform_name):
+    """ Constructs a platform from a dictionary. """
+    platform_list = all_platforms()
+    for p in platform_list:
+        if platform_name.replace("_", "").lower() == p.__name__.lower():
+            return p()
 
-def get_sys_type_from_environment():
-    """Return $SYS_TYPE or None if it's not defined."""
-    return os.environ.get('SYS_TYPE')
 
+def arch_from_dict(d):
+    """ Uses _platform_from_dict, _operating_system_from_dict, _target_from_dict
+        helper methods to recreate the arch tuple from the dictionary read from
+        a yaml file
+    """
+    arch = Arch()
 
-def get_sys_type_from_platform():
-    """Return the architecture from Python's platform module."""
-    sys_type = platform.system() + '-' + platform.machine()
-    sys_type = re.sub(r'[^\w-]', '_', sys_type)
-    return sys_type.lower()
+    if isinstance(d, basestring):
+        # We have an old spec using a string for the architecture
+        arch.platform = Platform('spack_compatibility')
+        arch.platform_os = OperatingSystem('unknown', '')
+        arch.target = Target(d)
+
+        arch.os_string = None
+        arch.target_string = None
+    else:
+        if d is None:
+            return None
+        platform_name = d['platform']
+        os_name = d['platform_os']
+        target_name = d['target']
+
+        if platform_name:
+            arch.platform = _platform_from_dict(platform_name)
+        else:
+            arch.platform = None
+        if target_name:
+            arch.target = _target_from_dict(target_name, arch.platform)
+        else:
+            arch.target = None
+        if os_name:
+            arch.platform_os = _operating_system_from_dict(os_name,
+                                                           arch.platform)
+        else:
+            arch.platform_os = None
+
+        arch.os_string = None
+        arch.target_string = None
+
+    return arch
 
 
 @memoized
-def sys_type():
-    """Returns a SysType for the current machine."""
-    methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
-               get_sys_type_from_platform]
+def all_platforms():
+    classes = []
+    mod_path = spack.platform_path
+    parent_module = "spack.platforms"
+
+    for name in list_modules(mod_path):
+        mod_name = '%s.%s' % (parent_module, name)
+        class_name = mod_to_class(name)
+        mod = __import__(mod_name, fromlist=[class_name])
+        if not hasattr(mod, class_name):
+            tty.die('No class %s defined in %s' % (class_name, mod_name))
+        cls = getattr(mod, class_name)
+        if not inspect.isclass(cls):
+            tty.die('%s.%s is not a class' % (mod_name, class_name))
+
+        classes.append(cls)
 
-    # search for a method that doesn't return None
-    sys_type = None
-    for method in methods:
-        sys_type = method()
-        if sys_type:
-            break
+    return classes
 
-    # Couldn't determine the sys_type for this machine.
-    if sys_type is None:
-        return "unknown_arch"
 
-    if not isinstance(sys_type, basestring):
-        raise InvalidSysTypeError(sys_type)
+@memoized
+def sys_type():
+    """ Gather a list of all available subclasses of platforms.
+        Sorts the list according to their priority. Priority is
+        an arbitrarily set number. Detects platform either using uname or
+        a file path (/opt/cray...)
+    """
+    # Try to create a Platform object using the config file FIRST
+    platform_list = all_platforms()
+    platform_list.sort(key=lambda a: a.priority)
 
-    return sys_type
+    for platform in platform_list:
+        if platform.detect():
+            return platform()
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index d87aaa6285833442640593ba1c3268dfdc4250b1..7c65091d49caede579ab8b33ee60637e65b7062b 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -113,9 +113,66 @@ def __call__(self, *args, **kwargs):
 
         return super(MakeExecutable, self).__call__(*args, **kwargs)
 
+def load_module(mod):
+    """Takes a module name and removes modules until it is possible to
+    load that module. It then loads the provided module. Depends on the
+    modulecmd implementation of modules used in cray and lmod.
+    """
+    #Create an executable of the module command that will output python code
+    modulecmd = which('modulecmd')
+    modulecmd.add_default_arg('python')
+
+    # Read the module and remove any conflicting modules
+    # We do this without checking that they are already installed
+    # for ease of programming because unloading a module that is not
+    # loaded does nothing.
+    text = modulecmd('show', mod, output=str, error=str).split()
+    for i, word in enumerate(text):
+        if word == 'conflict':
+            exec(compile(modulecmd('unload', text[i+1], output=str, error=str), '<string>', 'exec'))
+    # Load the module now that there are no conflicts
+    load = modulecmd('load', mod, output=str, error=str)
+    exec(compile(load, '<string>', 'exec'))
+
+def get_path_from_module(mod):
+    """Inspects a TCL module for entries that indicate the absolute path
+    at which the library supported by said module can be found.
+    """
+    # Create a modulecmd executable
+    modulecmd = which('modulecmd')
+    modulecmd.add_default_arg('python')
+
+    # Read the module
+    text = modulecmd('show', mod, output=str, error=str).split('\n')
+    # If it lists its package directory, return that
+    for line in text:
+        if line.find(mod.upper()+'_DIR') >= 0:
+            words = line.split()
+            return words[2]
+
+    # If it lists a -rpath instruction, use that
+    for line in text:
+        rpath = line.find('-rpath/')
+        if rpath >= 0:
+            return line[rpath+6:line.find('/lib')]
+
+    # If it lists a -L instruction, use that
+    for line in text:
+        L = line.find('-L/')
+        if L >= 0:
+            return line[L+2:line.find('/lib')]
+
+    # If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that
+    for line in text:
+        if line.find('LD_LIBRARY_PATH') >= 0: 
+            words = line.split()
+            path = words[2]
+            return path[:path.find('/lib')]
+    # Unable to find module path
+    return None
 
 def set_compiler_environment_variables(pkg, env):
-    assert pkg.spec.concrete
+    assert(pkg.spec.concrete)
     compiler = pkg.compiler
     flags = pkg.spec.compiler_flags
 
@@ -154,6 +211,10 @@ def set_compiler_environment_variables(pkg, env):
             env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
 
     env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
+
+    for mod in compiler.modules:
+        load_module(mod)
+
     return env
 
 
@@ -212,13 +273,15 @@ def set_build_environment_variables(pkg, env):
     env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
 
     # Add any pkgconfig directories to PKG_CONFIG_PATH
-    pkg_config_dirs = []
-    for p in dep_prefixes:
-        for maybe in ('lib', 'lib64', 'share'):
-            pcdir = join_path(p, maybe, 'pkgconfig')
+    for pre in dep_prefixes:
+        for directory in ('lib', 'lib64', 'share'):
+            pcdir = join_path(pre, directory, 'pkgconfig')
             if os.path.isdir(pcdir):
-                pkg_config_dirs.append(pcdir)
-    env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
+                #pkg_config_dirs.append(pcdir)
+                env.prepend_path('PKG_CONFIG_PATH',pcdir)
+
+    if pkg.spec.architecture.target.module_name:
+        load_module(pkg.spec.architecture.target.module_name)
 
     return env
 
@@ -301,6 +364,10 @@ def get_rpaths(pkg):
                   if os.path.isdir(d.prefix.lib))
     rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
                   if os.path.isdir(d.prefix.lib64))
+    # Second module is our compiler mod name. We use that to get rpaths from
+    # module show output. 
+    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
+        rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
     return rpaths
 
 
@@ -317,6 +384,13 @@ def parent_class_modules(cls):
     return result
 
 
+def load_external_modules(pkg):
+    """ traverse the spec list and find any specs that have external modules.
+    """
+    for dep in list(pkg.spec.traverse()):
+        if dep.external_module:
+            load_module(dep.external_module)
+    
 def setup_package(pkg):
     """Execute all environment setup routines."""
     spack_env = EnvironmentModifications()
@@ -340,7 +414,7 @@ def setup_package(pkg):
 
     set_compiler_environment_variables(pkg, spack_env)
     set_build_environment_variables(pkg, spack_env)
-
+    load_external_modules(pkg)
     # traverse in postorder so package can use vars from its dependencies
     spec = pkg.spec
     for dspec in pkg.spec.traverse(order='post', root=False):
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index dc96dd0faaaa234e04b5f12ea7085dec335e2985..cf2f96fd21fc5a6c705a931b43d569e55faa6a85 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -28,8 +28,4 @@
 description = "Print the architecture for this machine"
 
 def arch(parser, args):
-    configured_sys_type = architecture.get_sys_type_from_spack_globals()
-    if not configured_sys_type:
-        configured_sys_type = "autodetect"
-    print "Configured sys_type:             %s" % configured_sys_type
-    print "Autodetected default sys_type:   %s" % architecture.sys_type()
+    print architecture.sys_type()
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index dc7731a290f87313c2a7a8bb4df3fce80a8d3645..c95045ef85db5d5a8ccc1ff9105bb336656276e5 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -70,7 +70,7 @@ def setup_parser(subparser):
 
 
 def compiler_find(args):
-    """Search either $PATH or a list of paths for compilers and add them
+    """Search either $PATH or a list of paths or modules for compilers and add them
        to Spack's configuration."""
     paths = args.add_paths
     if not paths:
@@ -78,7 +78,6 @@ def compiler_find(args):
 
     compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
                  if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
-
     if compilers:
         spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
         n = len(compilers)
@@ -93,7 +92,6 @@ def compiler_find(args):
 def compiler_remove(args):
     cspec = CompilerSpec(args.compiler_spec)
     compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
-
     if not compilers:
         tty.die("No compilers match spec %s" % cspec)
     elif not args.all and len(compilers) > 1:
@@ -121,6 +119,8 @@ def compiler_info(args):
             print "\tcxx = %s" % c.cxx
             print "\tf77 = %s" % c.f77
             print "\tfc  = %s" % c.fc
+            print "\tmodules  = %s" % c.modules
+            print "\toperating system  = %s" % c.operating_system
 
 
 def compiler_list(args):
@@ -135,10 +135,10 @@ def compiler_list(args):
 
 
 def compiler(parser, args):
-    action = { 'add'    : compiler_find,
-               'find'   : compiler_find,
-               'remove' : compiler_remove,
-               'rm'     : compiler_remove,
-               'info'   : compiler_info,
-               'list'   : compiler_list }
+    action = {'add'    : compiler_find, 
+              'find'   : compiler_find,
+              'remove' : compiler_remove,
+              'rm'     : compiler_remove,
+              'info'   : compiler_info,
+              'list'   : compiler_list }
     action[args.compiler_command](args)
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index c22268d534561be497c040e58af76217896b4b05..3ec671f93f6155f8f4430bb9692c31af02b687ac 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -31,6 +31,7 @@
 from llnl.util.lang import *
 from llnl.util.tty.colify import *
 from llnl.util.tty.color import *
+from llnl.util.lang import *
 
 description = "Find installed spack packages"
 
@@ -85,6 +86,11 @@ def setup_parser(subparser):
         action='store_true',
         dest='missing',
         help='Show missing dependencies as well as installed specs.')
+    subparser.add_argument(
+        '-v', '--variants',
+        action='store_true',
+        dest='variants',
+        help='Show variants in output (can be long)')
     subparser.add_argument('-M', '--only-missing',
                            action='store_true',
                            dest='only_missing',
@@ -106,6 +112,8 @@ def display_specs(specs, **kwargs):
     mode = kwargs.get('mode', 'short')
     hashes = kwargs.get('long', False)
     namespace = kwargs.get('namespace', False)
+    flags = kwargs.get('show_flags', False)
+    variants = kwargs.get('variants', False)
 
     hlen = 7
     if kwargs.get('very_long', False):
@@ -113,10 +121,9 @@ def display_specs(specs, **kwargs):
         hlen = None
 
     nfmt = '.' if namespace else '_'
-    format_string = '$%s$@$+' % nfmt
-    flags = kwargs.get('show_flags', False)
-    if flags:
-        format_string = '$%s$@$%%+$+' % nfmt
+    ffmt = '$%+' if flags else ''
+    vfmt = '$+' if variants else ''
+    format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
 
     # Make a dict with specs keyed by architecture and compiler.
     index = index_by(specs, ('architecture', 'compiler'))
@@ -162,7 +169,7 @@ def fmt(s):
                     string = ""
                     if hashes:
                         string += gray_hash(s, hlen) + ' '
-                    string += s.format('$-%s$@$+' % nfmt, color=True)
+                    string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
 
                     return string
 
@@ -180,6 +187,29 @@ def fmt(s):
                 "deps, short)." % mode)  # NOQA: ignore=E501
 
 
+def query_arguments(args):
+    # Check arguments
+    if args.explicit and args.implicit:
+        tty.error('You can\'t pass -E and -e options simultaneously.')
+        raise SystemExit(1)
+
+    # Set up query arguments.
+    installed, known = True, any
+    if args.only_missing:
+        installed = False
+    elif args.missing:
+        installed = any
+    if args.unknown:
+        known = False
+    explicit = any
+    if args.explicit:
+        explicit = True
+    if args.implicit:
+        explicit = False
+    q_args = {'installed': installed, 'known': known, "explicit": explicit}
+    return q_args
+
+
 def find(parser, args):
     # Filter out specs that don't exist.
     query_specs = spack.cmd.parse_specs(args.query_specs)
@@ -194,22 +224,7 @@ def find(parser, args):
         if not query_specs:
             return
 
-    # Set up query arguments.
-    installed, known = True, any
-    if args.only_missing:
-        installed = False
-    elif args.missing:
-        installed = any
-    if args.unknown:
-        known = False
-
-    explicit = any
-    if args.explicit:
-        explicit = False
-    if args.implicit:
-        explicit = True
-
-    q_args = {'installed': installed, 'known': known, "explicit": explicit}
+    q_args = query_arguments(args)
 
     # Get all the specs the user asked for
     if not query_specs:
@@ -228,4 +243,6 @@ def find(parser, args):
                   mode=args.mode,
                   long=args.long,
                   very_long=args.very_long,
-                  show_flags=args.show_flags)
+                  show_flags=args.show_flags,
+                  namespace=args.namespace,
+                  variants=args.variants)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 9fdf3045b28846459eee1bd948a4c1d2fe0d6328..a6f08d09ed8c526b43b0b58b904f234e2fa07428 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -39,6 +39,13 @@
     b) use spack uninstall -a to uninstall ALL matching specs.
 """
 
+# Arguments for display_specs when we find ambiguity
+display_args = {
+    'long': True,
+    'show_flags': True,
+    'variants':True
+}
+
 
 def ask_for_confirmation(message):
     while True:
@@ -92,7 +99,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
         if not allow_multiple_matches and len(matching) > 1:
             tty.error("%s matches multiple packages:" % spec)
             print()
-            display_specs(matching, long=True, show_flags=True)
+            display_specs(matching, **display_args)
             print()
             has_errors = True
 
@@ -172,7 +179,7 @@ def uninstall(parser, args):
                 tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
                 print('')
                 print("The following packages depend on it:")
-                display_specs(lst, long=True)
+                display_specs(lst, **display_args)
                 print('')
                 has_error = True
         elif args.dependents:
@@ -186,7 +193,7 @@ def uninstall(parser, args):
         if not args.yes_to_all:
             tty.msg("The following packages will be uninstalled : ")
             print('')
-            display_specs(uninstall_list, long=True, show_flags=True)
+            display_specs(uninstall_list, **display_args)
             print('')
             ask_for_confirmation('Do you want to proceed ? ')
 
diff --git a/lib/spack/spack/cmd/view.py b/lib/spack/spack/cmd/view.py
new file mode 100644
index 0000000000000000000000000000000000000000..8f1fc9be740d8e5b20bbdcd2a23e156d73cf2b5e
--- /dev/null
+++ b/lib/spack/spack/cmd/view.py
@@ -0,0 +1,295 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+'''Produce a "view" of a Spack DAG.
+
+A "view" is file hierarchy representing the union of a number of
+Spack-installed package file hierarchies.  The union is formed from:
+
+- specs resolved from the package names given by the user (the seeds)
+
+- all dependencies of the seeds unless user specifies `--no-dependencies`
+
+- less any specs with names matching the regular expressions given by
+  `--exclude`
+
+The `view` can be built and torn down via a number of methods (the "actions"):
+
+- symlink :: a file system view which is a directory hierarchy that is
+  the union of the hierarchies of the installed packages in the DAG
+  where installed files are referenced via symlinks.
+
+- hardlink :: like the symlink view but hardlinks are used.
+
+- statlink :: a view producing a status report of a symlink or
+  hardlink view.
+
+The file system view concept is inspired by Nix, implemented by
+brett.viren@gmail.com ca 2016.
+
+'''
+# Implementation notes:
+#
+# This is implemented as a visitor pattern on the set of package specs.
+#
+# The command line ACTION maps to a visitor_*() function which takes
+# the set of package specs and any args which may be specific to the
+# ACTION.
+#
+# To add a new view:
+# 1. add a new cmd line args sub parser ACTION
+# 2. add any action-specific options/arguments, most likely a list of specs.
+# 3. add a visitor_MYACTION() function
+# 4. add any visitor_MYALIAS assignments to match any command line aliases
+
+import os
+import re
+import spack
+import spack.cmd
+import llnl.util.tty as tty
+
+description = "Produce a single-rooted directory view of a spec."
+
+
+def setup_parser(sp):
+    setup_parser.parser = sp
+
+    sp.add_argument(
+        '-v', '--verbose', action='store_true', default=False,
+        help="Display verbose output.")
+    sp.add_argument(
+        '-e', '--exclude', action='append', default=[],
+        help="Exclude packages with names matching the given regex pattern.")
+    sp.add_argument(
+        '-d', '--dependencies', choices=['true', 'false', 'yes', 'no'],
+        default='true',
+        help="Follow dependencies.")
+
+    ssp = sp.add_subparsers(metavar='ACTION', dest='action')
+
+    specs_opts = dict(metavar='spec', nargs='+',
+                      help="Seed specs of the packages to view.")
+
+    # The action parameterizes the command but in keeping with Spack
+    # patterns we make it a subcommand.
+    file_system_view_actions = [
+        ssp.add_parser(
+            'symlink', aliases=['add', 'soft'],
+            help='Add package files to a filesystem view via symbolic links.'),
+        ssp.add_parser(
+            'hardlink', aliases=['hard'],
+            help='Add packages files to a filesystem via via hard links.'),
+        ssp.add_parser(
+            'remove', aliases=['rm'],
+            help='Remove packages from a filesystem view.'),
+        ssp.add_parser(
+            'statlink', aliases=['status', 'check'],
+            help='Check status of packages in a filesystem view.')
+    ]
+    # All these options and arguments are common to every action.
+    for act in file_system_view_actions:
+        act.add_argument('path', nargs=1,
+                         help="Path to file system view directory.")
+        act.add_argument('specs', **specs_opts)
+
+    return
+
+
+def assuredir(path):
+    'Assure path exists as a directory'
+    if not os.path.exists(path):
+        os.makedirs(path)
+
+
+def relative_to(prefix, path):
+    'Return end of `path` relative to `prefix`'
+    assert 0 == path.find(prefix)
+    reldir = path[len(prefix):]
+    if reldir.startswith('/'):
+        reldir = reldir[1:]
+    return reldir
+
+
+def transform_path(spec, path, prefix=None):
+    'Return a relative path corresponding to the given path in spec.prefix'
+    if os.path.isabs(path):
+        path = relative_to(spec.prefix, path)
+    subdirs = path.split(os.path.sep)
+    if subdirs[0] == '.spack':
+        lst = ['.spack', spec.name] + subdirs[1:]
+        path = os.path.join(*lst)
+    if prefix:
+        path = os.path.join(prefix, path)
+    return path
+
+
+def purge_empty_directories(path):
+    '''Ascend up from the leaves accessible from `path`
+    and remove empty directories.'''
+    for dirpath, subdirs, files in os.walk(path, topdown=False):
+        for sd in subdirs:
+            sdp = os.path.join(dirpath, sd)
+            try:
+                os.rmdir(sdp)
+            except OSError:
+                pass
+
+
+def filter_exclude(specs, exclude):
+    'Filter specs given sequence of exclude regex'
+    to_exclude = [re.compile(e) for e in exclude]
+
+    def exclude(spec):
+        for e in to_exclude:
+            if e.match(spec.name):
+                return True
+        return False
+    return [s for s in specs if not exclude(s)]
+
+
+def flatten(seeds, descend=True):
+    'Normalize and flatten seed specs and descend hierarchy'
+    flat = set()
+    for spec in seeds:
+        if not descend:
+            flat.add(spec)
+            continue
+        flat.update(spec.normalized().traverse())
+    return flat
+
+
+def check_one(spec, path, verbose=False):
+    'Check status of view in path against spec'
+    dotspack = os.path.join(path, '.spack', spec.name)
+    if os.path.exists(os.path.join(dotspack)):
+        tty.info('Package in view: "%s"' % spec.name)
+        return
+    tty.info('Package not in view: "%s"' % spec.name)
+    return
+
+
+def remove_one(spec, path, verbose=False):
+    'Remove any files found in `spec` from `path` and purge empty directories.'
+
+    if not os.path.exists(path):
+        return                  # done, short circuit
+
+    dotspack = transform_path(spec, '.spack', path)
+    if not os.path.exists(dotspack):
+        if verbose:
+            tty.info('Skipping nonexistent package: "%s"' % spec.name)
+        return
+
+    if verbose:
+        tty.info('Removing package: "%s"' % spec.name)
+    for dirpath, dirnames, filenames in os.walk(spec.prefix):
+        if not filenames:
+            continue
+        targdir = transform_path(spec, dirpath, path)
+        for fname in filenames:
+            dst = os.path.join(targdir, fname)
+            if not os.path.exists(dst):
+                continue
+            os.unlink(dst)
+
+
+def link_one(spec, path, link=os.symlink, verbose=False):
+    'Link all files in `spec` into directory `path`.'
+
+    dotspack = transform_path(spec, '.spack', path)
+    if os.path.exists(dotspack):
+        tty.warn('Skipping existing package: "%s"' % spec.name)
+        return
+
+    if verbose:
+        tty.info('Linking package: "%s"' % spec.name)
+    for dirpath, dirnames, filenames in os.walk(spec.prefix):
+        if not filenames:
+            continue        # avoid explicitly making empty dirs
+
+        targdir = transform_path(spec, dirpath, path)
+        assuredir(targdir)
+
+        for fname in filenames:
+            src = os.path.join(dirpath, fname)
+            dst = os.path.join(targdir, fname)
+            if os.path.exists(dst):
+                if '.spack' in dst.split(os.path.sep):
+                    continue    # silence these
+                tty.warn("Skipping existing file: %s" % dst)
+                continue
+            link(src, dst)
+
+
+def visitor_symlink(specs, args):
+    'Symlink all files found in specs'
+    path = args.path[0]
+    assuredir(path)
+    for spec in specs:
+        link_one(spec, path, verbose=args.verbose)
+visitor_add = visitor_symlink
+visitor_soft = visitor_symlink
+
+
+def visitor_hardlink(specs, args):
+    'Hardlink all files found in specs'
+    path = args.path[0]
+    assuredir(path)
+    for spec in specs:
+        link_one(spec, path, os.link, verbose=args.verbose)
+visitor_hard = visitor_hardlink
+
+
+def visitor_remove(specs, args):
+    'Remove all files and directories found in specs from args.path'
+    path = args.path[0]
+    for spec in specs:
+        remove_one(spec, path, verbose=args.verbose)
+    purge_empty_directories(path)
+visitor_rm = visitor_remove
+
+
+def visitor_statlink(specs, args):
+    'Give status of view in args.path relative to specs'
+    path = args.path[0]
+    for spec in specs:
+        check_one(spec, path, verbose=args.verbose)
+visitor_status = visitor_statlink
+visitor_check = visitor_statlink
+
+
+def view(parser, args):
+    'Produce a view of a set of packages.'
+
+    # Process common args
+    seeds = [spack.cmd.disambiguate_spec(s) for s in args.specs]
+    specs = flatten(seeds, args.dependencies.lower() in ['yes', 'true'])
+    specs = filter_exclude(specs, args.exclude)
+
+    # Execute the visitation.
+    try:
+        visitor = globals()['visitor_' + args.action]
+    except KeyError:
+        tty.error('Unknown action: "%s"' % args.action)
+    visitor(specs, args)
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 2ae305f20150a5ee69b8b6b77c94d98b34ee4166..ce4555bc5653f4b0d64c6084dd864456850656f4 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -33,6 +33,7 @@
 
 import spack.error
 import spack.spec
+import spack.architecture
 from spack.util.multiproc import parmap
 from spack.util.executable import *
 from spack.util.environment import get_path
@@ -107,19 +108,32 @@ def f77_rpath_arg(self):
     @property
     def fc_rpath_arg(self):
         return '-Wl,-rpath,'
+    # Cray PrgEnv name that can be used to load this compiler
+    PrgEnv = None
+    # Name of module used to switch versions of this compiler
+    PrgEnv_compiler = None
 
-
-    def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
+    def __init__(self, cspec, operating_system, 
+                 paths, modules=[], alias=None, **kwargs):
         def check(exe):
             if exe is None:
                 return None
             _verify_executables(exe)
             return exe
 
-        self.cc  = check(cc)
-        self.cxx = check(cxx)
-        self.f77 = check(f77)
-        self.fc  = check(fc)
+        self.cc  = check(paths[0])
+        self.cxx = check(paths[1])
+        if len(paths) > 2:
+            self.f77 = check(paths[2])
+            if len(paths) == 3:
+                self.fc = self.f77
+            else:
+                self.fc  = check(paths[3])
+
+        #self.cc  = check(cc)
+        #self.cxx = check(cxx)
+        #self.f77 = check(f77)
+        #self.fc  = check(fc)
 
         # Unfortunately have to make sure these params are accepted
         # in the same order they are returned by sorted(flags)
@@ -130,8 +144,10 @@ def check(exe):
             if value is not None:
                 self.flags[flag] = value.split()
 
+        self.operating_system = operating_system
         self.spec = cspec
-
+        self.modules = modules
+        self.alias = alias
 
     @property
     def version(self):
@@ -258,57 +274,6 @@ def check(key):
         successful.reverse()
         return dict(((v, p, s), path) for v, p, s, path in successful)
 
-    @classmethod
-    def find(cls, *path):
-        """Try to find this type of compiler in the user's
-           environment. For each set of compilers found, this returns
-           compiler objects with the cc, cxx, f77, fc paths and the
-           version filled in.
-
-           This will search for compilers with the names in cc_names,
-           cxx_names, etc. and it will group them if they have common
-           prefixes, suffixes, and versions.  e.g., gcc-mp-4.7 would
-           be grouped with g++-mp-4.7 and gfortran-mp-4.7.
-        """
-        dicts = parmap(
-            lambda t: cls._find_matches_in_path(*t),
-            [(cls.cc_names,  cls.cc_version)  + tuple(path),
-             (cls.cxx_names, cls.cxx_version) + tuple(path),
-             (cls.f77_names, cls.f77_version) + tuple(path),
-             (cls.fc_names,  cls.fc_version)  + tuple(path)])
-
-        all_keys = set()
-        for d in dicts:
-            all_keys.update(d)
-
-        compilers = {}
-        for k in all_keys:
-            ver, pre, suf = k
-
-            # Skip compilers with unknown version.
-            if ver == 'unknown':
-                continue
-
-            paths = tuple(pn[k] if k in pn else None for pn in dicts)
-            spec = spack.spec.CompilerSpec(cls.name, ver)
-
-            if ver in compilers:
-                prev = compilers[ver]
-
-                # prefer the one with more compilers.
-                prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
-                newcount  = len([p for p in paths      if p is not None])
-                prevcount = len([p for p in prev_paths if p is not None])
-
-                # Don't add if it's not an improvement over prev compiler.
-                if newcount <= prevcount:
-                    continue
-
-            compilers[ver] = cls(spec, *paths)
-
-        return list(compilers.values())
-
-
     def __repr__(self):
         """Return a string representation of the compiler toolchain."""
         return self.__str__()
@@ -317,7 +282,7 @@ def __repr__(self):
     def __str__(self):
         """Return a string representation of the compiler toolchain."""
         return "%s(%s)" % (
-            self.name, '\n     '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
+            self.name, '\n     '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system)))))
 
 
 class CompilerAccessError(spack.error.SpackError):
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 7c951ae8bcdadd8191e39cca02ea553e52d234fe..ae72b743b2dba5b7b7911b567c28b69f5c15f411 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -28,6 +28,11 @@
 import imp
 import os
 import platform
+import copy
+import hashlib
+import base64
+import yaml
+import sys
 
 from llnl.util.lang import memoized, list_modules
 from llnl.util.filesystem import join_path
@@ -45,7 +50,9 @@
 from spack.util.environment import get_path
 
 _imported_compilers_module = 'spack.compilers'
-_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_other_instance_vars = ['modules', 'operating_system']
+_cache_config_file = []
 
 # TODO: customize order in config file
 if platform.system() == 'Darwin':
@@ -64,107 +71,105 @@ def converter(cspec_like, *args, **kwargs):
 
 def _to_dict(compiler):
     """Return a dict version of compiler suitable to insert in YAML."""
-    return {
-        str(compiler.spec) : dict(
-            (attr, getattr(compiler, attr, None))
-            for attr in _required_instance_vars)
-    }
+    d = {}
+    d['spec'] = str(compiler.spec)
+    d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars )
+    d['operating_system'] = str(compiler.operating_system)
+    d['modules'] = compiler.modules if compiler.modules else []
 
+    if compiler.alias:
+        d['alias'] = compiler.alias
 
-def get_compiler_config(arch=None, scope=None):
+    return {'compiler': d}
+
+
+def get_compiler_config(scope=None):
     """Return the compiler configuration for the specified architecture.
     """
-    # Check whether we're on a front-end (native) architecture.
-    my_arch = spack.architecture.sys_type()
-    if arch is None:
-        arch = my_arch
-
     def init_compiler_config():
         """Compiler search used when Spack has no compilers."""
-        config[arch] = {}
-        compilers = find_compilers(*get_path('PATH'))
+        compilers = find_compilers()
+        compilers_dict = []
         for compiler in compilers:
-            config[arch].update(_to_dict(compiler))
-        spack.config.update_config('compilers', config, scope=scope)
+            compilers_dict.append(_to_dict(compiler))
+        spack.config.update_config('compilers', compilers_dict, scope=scope)
 
     config = spack.config.get_config('compilers', scope=scope)
-
     # Update the configuration if there are currently no compilers
     # configured.  Avoid updating automatically if there ARE site
     # compilers configured but no user ones.
-    if arch == my_arch and arch not in config:
+    if not config:
         if scope is None:
             # We know no compilers were configured in any scope.
             init_compiler_config()
+            config = spack.config.get_config('compilers', scope=scope)
         elif scope == 'user':
             # Check the site config and update the user config if
             # nothing is configured at the site level.
             site_config = spack.config.get_config('compilers', scope='site')
             if not site_config:
                 init_compiler_config()
-
-    return config[arch] if arch in config else {}
+                config = spack.config.get_config('compilers', scope=scope)
+        return config
+    elif config:
+        return config
+    else:
+        return []  # Return empty list which we will later append to.
 
 
-def add_compilers_to_config(compilers, arch=None, scope=None):
+def add_compilers_to_config(compilers, scope=None):
     """Add compilers to the config for the specified architecture.
 
     Arguments:
       - compilers: a list of Compiler objects.
-      - arch:      arch to add compilers for.
       - scope:     configuration scope to modify.
     """
-    if arch is None:
-        arch = spack.architecture.sys_type()
-
-    compiler_config = get_compiler_config(arch, scope)
+    compiler_config = get_compiler_config(scope)
     for compiler in compilers:
-        compiler_config[str(compiler.spec)] = dict(
-            (c, getattr(compiler, c, "None"))
-            for c in _required_instance_vars)
-
-    update = { arch : compiler_config }
-    spack.config.update_config('compilers', update, scope)
+        compiler_config.append(_to_dict(compiler))
+    global _cache_config_file
+    _cache_config_file = compiler_config
+    spack.config.update_config('compilers', compiler_config, scope)
 
 
 @_auto_compiler_spec
-def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
+def remove_compiler_from_config(compiler_spec, scope=None):
     """Remove compilers from the config, by spec.
 
     Arguments:
       - compiler_specs: a list of CompilerSpec objects.
-      - arch:           arch to add compilers for.
       - scope:          configuration scope to modify.
     """
-    if arch is None:
-        arch = spack.architecture.sys_type()
-
-    compiler_config = get_compiler_config(arch, scope)
-    del compiler_config[str(compiler_spec)]
-    update = { arch : compiler_config }
-
-    spack.config.update_config('compilers', update, scope)
-
-
-def all_compilers_config(arch=None, scope=None):
+    compiler_config = get_compiler_config(scope)
+    config_length = len(compiler_config)
+    
+    filtered_compiler_config = [comp for comp in compiler_config 
+               if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
+    # Need a better way for this
+    global _cache_config_file
+    _cache_config_file = filtered_compiler_config # Update the cache for changes
+    if len(filtered_compiler_config) == config_length: # No items removed
+        CompilerSpecInsufficientlySpecificError(compiler_spec)
+    spack.config.update_config('compilers', filtered_compiler_config, scope)
+
+
+def all_compilers_config(scope=None):
     """Return a set of specs for all the compiler versions currently
        available to build with.  These are instances of CompilerSpec.
     """
     # Get compilers for this architecture.
-    arch_config = get_compiler_config(arch, scope)
-
-    # Merge 'all' compilers with arch-specific ones.
-    # Arch-specific compilers have higher precedence.
-    merged_config = get_compiler_config('all', scope=scope)
-    merged_config = spack.config._merge_yaml(merged_config, arch_config)
-
-    return merged_config
+    global _cache_config_file #Create a cache of the config file so we don't load all the time.
+    if not _cache_config_file:
+        _cache_config_file = get_compiler_config(scope)
+        return _cache_config_file
+    else:
+        return _cache_config_file
 
 
-def all_compilers(arch=None, scope=None):
+def all_compilers(scope=None):
     # Return compiler specs from the merged config.
-    return [spack.spec.CompilerSpec(s)
-            for s in all_compilers_config(arch, scope)]
+    return [spack.spec.CompilerSpec(s['compiler']['spec'])
+            for s in all_compilers_config(scope)]
 
 
 def default_compiler():
@@ -179,36 +184,18 @@ def default_compiler():
     return sorted(versions)[-1]
 
 
-def find_compilers(*path):
+def find_compilers(*paths):
     """Return a list of compilers found in the suppied paths.
-       This invokes the find() method for each Compiler class,
-       and appends the compilers detected to a list.
+       This invokes the find_compilers() method for each operating
+       system associated with the host platform, and appends
+       the compilers detected to a list.
     """
-    # Make sure path elements exist, and include /bin directories
-    # under prefixes.
-    filtered_path = []
-    for p in path:
-        # Eliminate symlinks and just take the real directories.
-        p = os.path.realpath(p)
-        if not os.path.isdir(p):
-            continue
-        filtered_path.append(p)
-
-        # Check for a bin directory, add it if it exists
-        bin = join_path(p, 'bin')
-        if os.path.isdir(bin):
-            filtered_path.append(os.path.realpath(bin))
-
-    # Once the paths are cleaned up, do a search for each type of
-    # compiler.  We can spawn a bunch of parallel searches to reduce
-    # the overhead of spelunking all these directories.
-    types = all_compiler_types()
-    compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)
-
-    # ensure all the version calls we made are cached in the parent
-    # process, as well.  This speeds up Spack a lot.
-    clist = reduce(lambda x,y: x+y, compiler_lists)
-    return clist
+    # Find compilers for each operating system class
+    oss = all_os_classes()
+    compiler_lists = []
+    for o in oss:
+        compiler_lists.extend(o.find_compilers(*paths))
+    return compiler_lists
 
 
 def supported_compilers():
@@ -227,51 +214,83 @@ def supported(compiler_spec):
 
 
 @_auto_compiler_spec
-def find(compiler_spec, arch=None, scope=None):
+def find(compiler_spec, scope=None):
     """Return specs of available compilers that match the supplied
        compiler spec.  Return an list if nothing found."""
-    return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
+    return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]
 
 
 @_auto_compiler_spec
-def compilers_for_spec(compiler_spec, arch=None, scope=None):
+def compilers_for_spec(compiler_spec, scope=None, **kwargs):
     """This gets all compilers that satisfy the supplied CompilerSpec.
        Returns an empty list if none are found.
     """
-    config = all_compilers_config(arch, scope)
+    platform = kwargs.get("platform", None)
+    config = all_compilers_config(scope)
+
+    def get_compilers(cspec):
+        compilers = []
 
-    def get_compiler(cspec):
-        items = config[str(cspec)]
+        for items in config:
+            if items['compiler']['spec'] != str(cspec):
+                continue
+            items = items['compiler']
 
-        if not all(n in items for n in _required_instance_vars):
-            raise InvalidCompilerConfigurationError(cspec)
+            if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)):
+                raise InvalidCompilerConfigurationError(cspec)
 
-        cls  = class_for_compiler_name(cspec.name)
-        compiler_paths = []
-        for c in _required_instance_vars:
-            compiler_path = items[c]
-            if compiler_path != "None":
-                compiler_paths.append(compiler_path)
+            cls  = class_for_compiler_name(cspec.name)
+
+            compiler_paths = []
+            for c in _path_instance_vars:
+                compiler_path = items['paths'][c]
+                if compiler_path != "None":
+                    compiler_paths.append(compiler_path)
+                else:
+                    compiler_paths.append(None)
+
+            mods = items.get('modules')
+            if mods == 'None':
+                mods = []
+
+            if 'operating_system' in items:
+                operating_system = spack.architecture._operating_system_from_dict(items['operating_system'], platform)
             else:
-                compiler_paths.append(None)
+                operating_system = None
+
 
-        flags = {}
-        for f in spack.spec.FlagMap.valid_compiler_flags():
-            if f in items:
-                flags[f] = items[f]
-        return cls(cspec, *compiler_paths, **flags)
+            alias = items['alias'] if 'alias' in items else None
 
-    matches = find(compiler_spec, arch, scope)
-    return [get_compiler(cspec) for cspec in matches]
+            flags = {}
+            for f in spack.spec.FlagMap.valid_compiler_flags():
+                if f in items:
+                    flags[f] = items[f]
+
+            compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias, **flags))
+
+        return compilers
+
+    matches = set(find(compiler_spec, scope))
+    compilers = []
+    for cspec in matches:
+        compilers.extend(get_compilers(cspec))
+    return compilers
+#    return [get_compilers(cspec) for cspec in matches]
 
 
 @_auto_compiler_spec
-def compiler_for_spec(compiler_spec):
+def compiler_for_spec(compiler_spec, arch):
     """Get the compiler that satisfies compiler_spec.  compiler_spec must
        be concrete."""
+    operating_system = arch.platform_os
     assert(compiler_spec.concrete)
-    compilers = compilers_for_spec(compiler_spec)
-    assert(len(compilers) == 1)
+
+    compilers = [c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
+                if c.operating_system == operating_system]
+    if len(compilers) < 1:
+        raise NoCompilerForSpecError(compiler_spec, operating_system)
+    if len(compilers) > 1:
+        raise CompilerSpecInsufficientlySpecificError(compiler_spec)
     return compilers[0]
 
 
@@ -289,6 +308,19 @@ def class_for_compiler_name(compiler_name):
     return cls
 
 
+def all_os_classes():
+    """
+    Return the list of classes for all operating systems available on
+    this platform
+    """
+    classes = []
+
+    platform = spack.architecture.sys_type()
+    for os_class in platform.operating_sys.values():
+        classes.append(os_class)
+
+    return classes
+
 def all_compiler_types():
     return [class_for_compiler_name(c) for c in supported_compilers()]
 
@@ -298,9 +330,19 @@ def __init__(self, compiler_spec):
         super(InvalidCompilerConfigurationError, self).__init__(
             "Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
             "Compiler configuration must contain entries for all compilers: %s"
-            % _required_instance_vars)
+            % _path_instance_vars)
 
 
 class NoCompilersError(spack.error.SpackError):
     def __init__(self):
         super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+
+class NoCompilerForSpecError(spack.error.SpackError):
+    def __init__(self, compiler_spec, target):
+        super(NoCompilerForSpecError, self).__init__("No compilers for operating system %s satisfy spec %s" % (
+                                                     target, compiler_spec))
+
+class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
+    def __init__(self, compiler_spec):
+        super(CompilerSpecInsufficientlySpecificError, self).__init__("Multiple compilers satisfy spec %s",
+                                                                      compiler_spec)
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index 072bcd065f68fdef4c8f8371ab35d534ba93a03d..00b406d82008320aa17c9f8e3992e85bb1c175cd 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -73,7 +73,7 @@ def cxx11_flag(self):
                 return "-std=c++11"
 
     @classmethod
-    def default_version(self, comp):
+    def default_version(cls, comp):
         """The '--version' option works for clang compilers.
            On most platforms, output looks like this::
 
diff --git a/lib/spack/spack/compilers/craype.py b/lib/spack/spack/compilers/craype.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ba8b110ecc4de03030344aa9dd5b9b36e930344
--- /dev/null
+++ b/lib/spack/spack/compilers/craype.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import llnl.util.tty as tty
+
+#from spack.build_environment import load_module
+from spack.compiler import *
+#from spack.version import ver
+
+class Craype(Compiler):
+    # Subclasses use possible names of C compiler
+    cc_names = ['cc']
+
+    # Subclasses use possible names of C++ compiler
+    cxx_names = ['CC']
+
+    # Subclasses use possible names of Fortran 77 compiler
+    f77_names = ['ftn']
+
+    # Subclasses use possible names of Fortran 90 compiler
+    fc_names = ['ftn']
+
+    # NOTE(review): comment and suffix regex copied from the gcc class; confirm '-mp-X.Y' applies to Cray compilers
+    suffixes = [r'-mp-\d\.\d']
+
+    PrgEnv = 'PrgEnv-cray'
+    PrgEnv_compiler = 'craype'
+
+    link_paths = { 'cc'  : 'cc',
+                   'cxx' : 'c++',
+                   'f77' : 'f77',
+                   'fc'  : 'fc'}
+ 
+    @classmethod
+    def default_version(cls, comp):
+        return get_compiler_version(comp, r'([Vv]ersion).*(\d+(\.\d+)+)')
+
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 164bddeb3f15f92fed127398ea12ea49531f4ca3..3f552eaecec095014704d5de58879bfe846d6c06 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -49,6 +49,9 @@ class Gcc(Compiler):
                   'f77' : 'gcc/gfortran',
                   'fc'  : 'gcc/gfortran' }
 
+    PrgEnv = 'PrgEnv-gnu'
+    PrgEnv_compiler = 'gcc'
+
     @property
     def openmp_flag(self):
         return "-fopenmp"
@@ -74,9 +77,9 @@ def fc_version(cls, fc):
         return get_compiler_version(
             fc, '-dumpversion',
             # older gfortran versions don't have simple dumpversion output.
-            r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
+            r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)', module)
 
 
     @classmethod
     def f77_version(cls, f77):
-        return cls.fc_version(f77)
+        return cls.fc_version(f77, module)
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 5007ece645f144958c15cc1bb997809eef72e393..6cad03ff47d3d9b624db3d60df08ec23b886e559 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -45,6 +45,9 @@ class Intel(Compiler):
                    'f77' : 'intel/ifort',
                    'fc'  : 'intel/ifort' }
 
+    PrgEnv = 'PrgEnv-intel'
+    PrgEnv_compiler = 'intel'
+
     @property
     def openmp_flag(self):
         if self.version < ver('16.0'):
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index d42148dc49ac32cc90cec81b78709e3206a77bce..6d36d8bfa6d3460edf51a2062b3e6d0b8b92193d 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -44,6 +44,12 @@ class Pgi(Compiler):
                    'f77' : 'pgi/pgfortran',
                    'fc'  : 'pgi/pgfortran' }
 
+
+
+    PrgEnv = 'PrgEnv-pgi'
+    PrgEnv_compiler = 'pgi'
+
+
     @property
     def openmp_flag(self):
         return "-mp"
@@ -52,7 +58,6 @@ def openmp_flag(self):
     def cxx11_flag(self):
         return "-std=c++11"
 
-
     @classmethod
     def default_version(cls, comp):
         """The '-V' option works for all the PGI compilers.
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index bda2de4b87d95b8c3cb9db620fc104cfd844e66f..b1431436add96c5f248b3ea0c527b8efaed8cd7e 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -56,8 +56,9 @@ def cxx11_flag(self):
         else:
             return "-qlanglvl=extended0x"
 
+
     @classmethod
-    def default_version(self, comp):
+    def default_version(cls, comp):
         """The '-qversion' is the standard option fo XL compilers.
            Output looks like this::
 
@@ -83,6 +84,7 @@ def default_version(self, comp):
         return get_compiler_version(
             comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
 
+
     @classmethod
     def fc_version(cls, fc):
         """The fortran and C/C++ versions of the XL compiler are always two units apart.
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 4f78bfc347f98607c8a8ca53f443e12a438fd6c9..1f37455c7705b1cefffb443ed68c14df3001324b 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -84,7 +84,8 @@ def _valid_virtuals_and_externals(self, spec):
             raise NoBuildError(spec)
 
         def cmp_externals(a, b):
-            if a.name != b.name:
+            if a.name != b.name and (not a.external or a.external_module and
+                    not b.external and b.external_module):
                 # We're choosing between different providers, so
                 # maintain order from provider sort
                 return candidates.index(a) - candidates.index(b)
@@ -187,31 +188,64 @@ def prefer_key(v):
 
         return True   # Things changed
 
+    def _concretize_operating_system(self, spec):
+        platform = spec.architecture.platform
+        if spec.architecture.platform_os is not None and isinstance(
+            spec.architecture.platform_os,spack.architecture.OperatingSystem):
+            return False
 
-    def concretize_architecture(self, spec):
-        """If the spec already had an architecture, return.  Otherwise if
-           the root of the DAG has an architecture, then use that.
-           Otherwise take the system's default architecture.
-
-           Intuition: Architectures won't be set a lot, and generally you
-           want the host system's architecture.  When architectures are
-           mised in a spec, it is likely because the tool requries a
-           cross-compiled component, e.g. for tools that run on BlueGene
-           or Cray machines.  These constraints will likely come directly
-           from packages, so require the user to be explicit if they want
-           to mess with the architecture, and revert to the default when
-           they're not explicit.
-        """
-        if spec.architecture is not None:
+        if spec.root.architecture and spec.root.architecture.platform_os:
+            if isinstance(spec.root.architecture.platform_os,spack.architecture.OperatingSystem):
+                spec.architecture.platform_os = spec.root.architecture.platform_os
+        else:
+            spec.architecture.platform_os = spec.architecture.platform.operating_system('default_os')
+        return True #changed
+
+    def _concretize_target(self, spec):
+        platform = spec.architecture.platform
+        if spec.architecture.target is not None and isinstance(
+                spec.architecture.target, spack.architecture.Target):
             return False
+        if spec.root.architecture and spec.root.architecture.target:
+            if isinstance(spec.root.architecture.target,spack.architecture.Target):
+                spec.architecture.target = spec.root.architecture.target
+        else:
+            spec.architecture.target = spec.architecture.platform.target('default_target')
+        return True #changed
 
-        if spec.root.architecture:
-            spec.architecture = spec.root.architecture
+    def _concretize_platform(self, spec):
+        if spec.architecture.platform is not None and isinstance(
+                spec.architecture.platform, spack.architecture.Platform):
+            return False
+        if spec.root.architecture and spec.root.architecture.platform:
+            if isinstance(spec.root.architecture.platform,spack.architecture.Platform):
+                spec.architecture.platform = spec.root.architecture.platform
         else:
-            spec.architecture = spack.architecture.sys_type()
+            spec.architecture.platform = spack.architecture.sys_type()
+        return True #changed?
+
+    def concretize_architecture(self, spec):
+        """If the spec is empty provide the defaults of the platform. If the
+        architecture is not a basestring, then check if either the platform,
+        target or operating system are concretized. If any of the fields are
+        changed then return True. If everything is concretized (i.e the
+        architecture attribute is a namedtuple of classes) then return False.
+        If the target is a string type, then convert the string into a
+        concretized architecture. If it has no architecture and the root of the
+        DAG has an architecture, then use the root otherwise use the defaults
+        on the platform.
+        """
+        if spec.architecture is None:
+            # Set the architecture to all defaults
+            spec.architecture = spack.architecture.Arch()
+            return True
+
+        # Concretize the operating_system and target based on the spec
+        ret =  any((self._concretize_platform(spec),
+                    self._concretize_operating_system(spec),
+                    self._concretize_target(spec)))
+        return ret
 
-        assert(spec.architecture is not None)
-        return True   # changed
 
 
     def concretize_variants(self, spec):
@@ -238,6 +272,23 @@ def concretize_compiler(self, spec):
            build with the compiler that will be used by libraries that
            link to this one, to maximize compatibility.
         """
+        # Pass on concretizing the compiler if the target is not yet determined
+        if not spec.architecture.platform_os:
+            #Although this usually means changed, this means awaiting other changes
+            return True
+
+        # Only use a matching compiler if it is of the proper style
+        # Takes advantage of the proper logic already existing in compiler_for_spec
+        # Should think whether this can be more efficient
+        def _proper_compiler_style(cspec, arch):
+            platform = arch.platform
+            compilers = spack.compilers.compilers_for_spec(cspec,
+                                                           platform=platform)
+            return filter(lambda c: c.operating_system ==
+                                    arch.platform_os, compilers)
+            #return compilers
+
+
         all_compilers = spack.compilers.all_compilers()
 
         if (spec.compiler and
@@ -247,6 +298,7 @@ def concretize_compiler(self, spec):
 
         #Find the another spec that has a compiler, or the root if none do
         other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
+
         if not other_spec:
             other_spec = spec.root
         other_compiler = other_spec.compiler
@@ -265,7 +317,12 @@ def concretize_compiler(self, spec):
             raise UnavailableCompilerVersionError(other_compiler)
 
         # copy concrete version into other_compiler
-        spec.compiler = matches[0].copy()
+        index = 0
+        while not _proper_compiler_style(matches[index], spec.architecture):
+            index += 1
+            if index == len(matches) - 1:
+                raise NoValidVersionError(spec)
+        spec.compiler = matches[index].copy()
         assert(spec.compiler.concrete)
         return True  # things changed.
 
@@ -276,15 +333,21 @@ def concretize_compiler_flags(self, spec):
         compiler is used, defaulting to no compiler flags in the spec.
         Default specs set at the compiler level will still be added later.
         """
+
+            
+        if not spec.architecture.platform_os:
+            #Although this usually means changed, this means awaiting other changes
+            return True
+
         ret = False
         for flag in spack.spec.FlagMap.valid_compiler_flags():
             try:
                 nearest = next(p for p in spec.traverse(direction='parents')
                                if ((p.compiler == spec.compiler and p is not spec)
                                and flag in p.compiler_flags))
-                if ((not flag in spec.compiler_flags) or
-                    sorted(spec.compiler_flags[flag]) != sorted(nearest.compiler_flags[flag])):
-                    if flag in spec.compiler_flags:
+                if not flag in spec.compiler_flags or \
+                    not (sorted(spec.compiler_flags[flag]) >= sorted(nearest.compiler_flags[flag])):
+                    if flag in spec.compiler_flags: 
                         spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
                                                          set(nearest.compiler_flags[flag]))
                     else:
@@ -307,7 +370,7 @@ def concretize_compiler_flags(self, spec):
         # Include the compiler flag defaults from the config files
         # This ensures that spack will detect conflicts that stem from a change
         # in default compiler flags.
-        compiler = spack.compilers.compiler_for_spec(spec.compiler)
+        compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
         for flag in compiler.flags:
             if flag not in spec.compiler_flags:
                 spec.compiler_flags[flag] = compiler.flags[flag]
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index ec37bd290c9a9dae529c25618b17620f2ca910fa..db0787edc62553a22852c8eca61edb86efa37346 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -135,7 +135,7 @@
 
 # Hacked yaml for configuration files preserves line numbers.
 import spack.util.spack_yaml as syaml
-
+from spack.build_environment import get_path_from_module
 
 """Dict from section names -> schema for that section."""
 section_schemas = {
@@ -146,18 +146,17 @@
         'additionalProperties': False,
         'patternProperties': {
             'compilers:?': {  # optional colon for overriding site config.
-                'type': 'object',
-                'default': {},
-                'additionalProperties': False,
-                'patternProperties': {
-                    r'\w[\w-]*': {           # architecture
+                'type': 'array',
+                'items': {
+                    'compiler': {
                         'type': 'object',
                         'additionalProperties': False,
-                        'patternProperties': {
-                            r'\w[\w-]*@\w[\w-]*': {   # compiler spec
+                        'required': ['paths', 'spec', 'modules', 'operating_system'],
+                        'properties': {
+                            'paths': {
                                 'type': 'object',
-                                'additionalProperties': False,
                                 'required': ['cc', 'cxx', 'f77', 'fc'],
+                                'additionalProperties': False,
                                 'properties': {
                                     'cc':  { 'anyOf': [ {'type' : 'string' },
                                                         {'type' : 'null' }]},
@@ -167,8 +166,27 @@
                                                         {'type' : 'null' }]},
                                     'fc':  { 'anyOf': [ {'type' : 'string' },
                                                         {'type' : 'null' }]},
-                                },},},},},},},},
-
+                                    'cflags': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'cxxflags': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'fflags': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'cppflags': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'ldflags': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]},
+                                    'ldlibs': { 'anyOf': [ {'type' : 'string' },
+                                                        {'type' : 'null' }]}}},
+                            'spec': { 'type': 'string'},
+                            'operating_system': { 'type': 'string'},
+                            'alias': { 'anyOf': [ {'type' : 'string'},
+                                                    {'type' : 'null' }]},
+                            'modules': { 'anyOf': [ {'type' : 'string'},
+                                                    {'type' : 'null' },
+                                                    {'type': 'array'},
+                                                    ]}
+                            },},},},},},
     'mirrors': {
         '$schema': 'http://json-schema.org/schema#',
         'title': 'Spack mirror configuration file schema',
@@ -194,7 +212,6 @@
                 'default': [],
                 'items': {
                     'type': 'string'},},},},
-
     'packages': {
         '$schema': 'http://json-schema.org/schema#',
         'title': 'Spack package configuration file schema',
@@ -224,6 +241,10 @@
                                 'type':  'boolean',
                                 'default': True,
                              },
+                            'modules': {
+                                'type' : 'object',
+                                'default' : {},
+                             },
                             'providers': {
                                 'type':  'object',
                                 'default': {},
@@ -563,8 +584,7 @@ def they_are(t):
 
     # Source list is prepended (for precedence)
     if they_are(list):
-        seen = set(source)
-        dest[:] = source + [x for x in dest if x not in seen]
+        dest[:] = source + [x for x in dest if x not in source]
         return dest
 
     # Source dict is merged into dest.
@@ -667,7 +687,8 @@ def spec_externals(spec):
 
     external_specs = []
     pkg_paths = allpkgs.get(name, {}).get('paths', None)
-    if not pkg_paths:
+    pkg_modules = allpkgs.get(name, {}).get('modules', None)
+    if (not pkg_paths) and (not pkg_modules):
         return []
 
     for external_spec, path in pkg_paths.iteritems():
@@ -678,6 +699,17 @@ def spec_externals(spec):
         external_spec = spack.spec.Spec(external_spec, external=path)
         if external_spec.satisfies(spec):
             external_specs.append(external_spec)
+
+    for external_spec, module in pkg_modules.iteritems():
+        if not module:
+            continue
+
+        path = get_path_from_module(module)
+
+        external_spec = spack.spec.Spec(external_spec, external=path, external_module=module)
+        if external_spec.satisfies(spec):
+            external_specs.append(external_spec)
+
     return external_specs
 
 
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index e768ddf5feb1391147745899da815ee0ad3dd7be..f941346bb13481dff3804298112975d34ce9e1d5 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -214,9 +214,10 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
 
         # Add dependencies from other records in the install DB to
         # form a full spec.
-        for dep_hash in spec_dict[spec.name]['dependencies'].values():
-            child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
-            spec._add_dependency(child)
+        if 'dependencies' in spec_dict[spec.name]:
+            for dep_hash in spec_dict[spec.name]['dependencies'].values():
+                child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
+                spec._add_dependency(child)
 
         # Specs from the database need to be marked concrete because
         # they represent actual installations.
@@ -289,7 +290,8 @@ def check(cond, msg):
             except Exception as e:
                 tty.warn("Invalid database reecord:",
                          "file:  %s" % self._index_path,
-                         "hash:  %s" % hash_key, "cause: %s" % str(e))
+                         "hash:  %s" % hash_key,
+                         "cause: %s: %s" % (type(e).__name__, str(e)))
                 raise
 
         self._data = data
@@ -309,7 +311,11 @@ def reindex(self, directory_layout):
                 for spec in directory_layout.all_specs():
                     # Create a spec for each known package and add it.
                     path = directory_layout.path_for_spec(spec)
-                    self._add(spec, path, directory_layout)
+                    old_info = old_data.get(spec.dag_hash())
+                    explicit = False
+                    if old_info is not None:
+                        explicit = old_info.explicit
+                    self._add(spec, path, directory_layout, explicit=explicit)
 
                 self._check_ref_counts()
 
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 51b26773e24be75a3b869d46c501a35e9ebeb597..ca8f21dc088e263110c28367ea4568989dd7ef2f 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -257,7 +257,7 @@ def variant(pkg, name, default=False, description=""):
     """Define a variant for the package. Packager can specify a default
     value (on or off) as well as a text description."""
 
-    default = bool(default)
+    default     = default
     description = str(description).strip()
 
     if not re.match(spack.spec.identifier_re, name):
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 32d27d7bd048b8126c15335b23ae757f10134944..7e20365b0fdaec8aafd2513bab438e734eed8038 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -165,7 +165,7 @@ def remove_install_directory(self, spec):
 class YamlDirectoryLayout(DirectoryLayout):
     """Lays out installation directories like this::
            <install root>/
-               <architecture>/
+               <target>/
                    <compiler>-<compiler version>/
                        <name>-<version>-<variants>-<hash>
 
@@ -207,8 +207,7 @@ def relative_path_for_spec(self, spec):
             spec.version,
             spec.dag_hash(self.hash_len))
 
-        path = join_path(
-            spec.architecture,
+        path = join_path(spec.architecture,
             "%s-%s" % (spec.compiler.name, spec.compiler.version),
             dir_name)
 
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 2607d0a7f4049b92b01c83005973088e28710856..6f28ec34b25b42026667c954cbb49116c59532b9 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1,4 +1,4 @@
-##############################################################################
+#
 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
 #
@@ -21,7 +21,7 @@
 # You should have received a copy of the GNU Lesser General Public
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
+#
 """
 Fetch strategies are used to download source code into a staging area
 in order to build it.  They need to define the following methods:
@@ -75,11 +75,13 @@ def wrapper(self, *args, **kwargs):
 
 
 class FetchStrategy(object):
+
     """Superclass of all fetch strategies."""
     enabled = False  # Non-abstract subclasses should be enabled.
     required_attributes = None  # Attributes required in version() args.
 
     class __metaclass__(type):
+
         """This metaclass registers all fetch strategies in a list."""
 
         def __init__(cls, name, bases, dict):
@@ -126,6 +128,7 @@ def matches(cls, args):
 
 @pattern.composite(interface=FetchStrategy)
 class FetchStrategyComposite(object):
+
     """
     Composite for a FetchStrategy object. Implements the GoF composite pattern.
     """
@@ -134,6 +137,7 @@ class FetchStrategyComposite(object):
 
 
 class URLFetchStrategy(FetchStrategy):
+
     """FetchStrategy that pulls source code from a URL for an archive,
        checks the archive against a checksum,and decompresses the archive.
     """
@@ -235,12 +239,13 @@ def fetch(self):
         # redirects properly.
         content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
         if content_types and 'text/html' in content_types[-1]:
-            tty.warn(
-                "The contents of " + self.archive_file + " look like HTML.",
-                "The checksum will likely be bad.  If it is, you can use",
-                "'spack clean <package>' to remove the bad archive, then fix",
-                "your internet gateway issue and install again.")
-
+            tty.warn("The contents of ",
+                     (self.archive_file if self.archive_file is not None
+                      else "the archive"),
+                     " look like HTML.",
+                     "The checksum will likely be bad.  If it is, you can use",
+                     "'spack clean <package>' to remove the bad archive, then",
+                     "fix your internet gateway issue and install again.")
         if save_file:
             os.rename(partial_file, save_file)
 
@@ -371,6 +376,7 @@ def fetch(self):
 
 
 class VCSFetchStrategy(FetchStrategy):
+
     def __init__(self, name, *rev_types, **kwargs):
         super(VCSFetchStrategy, self).__init__()
         self.name = name
@@ -425,6 +431,7 @@ def __repr__(self):
 
 
 class GoFetchStrategy(VCSFetchStrategy):
+
     """
     Fetch strategy that employs the `go get` infrastructure
     Use like this in a package:
@@ -484,6 +491,7 @@ def __str__(self):
 
 
 class GitFetchStrategy(VCSFetchStrategy):
+
     """
     Fetch strategy that gets source code from a git repository.
     Use like this in a package:
@@ -604,6 +612,7 @@ def __str__(self):
 
 
 class SvnFetchStrategy(VCSFetchStrategy):
+
     """Fetch strategy that gets source code from a subversion repository.
        Use like this in a package:
 
@@ -680,6 +689,7 @@ def __str__(self):
 
 
 class HgFetchStrategy(VCSFetchStrategy):
+
     """
     Fetch strategy that gets source code from a Mercurial repository.
     Use like this in a package:
@@ -850,11 +860,13 @@ def fetcher(self, targetPath, digest):
 
 
 class FetchError(spack.error.SpackError):
+
     def __init__(self, msg, long_msg=None):
         super(FetchError, self).__init__(msg, long_msg)
 
 
 class FailedDownloadError(FetchError):
+
     """Raised wen a download fails."""
 
     def __init__(self, url, msg=""):
@@ -864,16 +876,19 @@ def __init__(self, url, msg=""):
 
 
 class NoArchiveFileError(FetchError):
+
     def __init__(self, msg, long_msg):
         super(NoArchiveFileError, self).__init__(msg, long_msg)
 
 
 class NoDigestError(FetchError):
+
     def __init__(self, msg, long_msg=None):
         super(NoDigestError, self).__init__(msg, long_msg)
 
 
 class InvalidArgsError(FetchError):
+
     def __init__(self, pkg, version):
         msg = ("Could not construct a fetch strategy for package %s at "
                "version %s")
@@ -882,6 +897,7 @@ def __init__(self, pkg, version):
 
 
 class ChecksumError(FetchError):
+
     """Raised when archive fails to checksum."""
 
     def __init__(self, message, long_msg=None):
@@ -889,6 +905,7 @@ def __init__(self, message, long_msg=None):
 
 
 class NoStageError(FetchError):
+
     """Raised when fetch operations are called before set_stage()."""
 
     def __init__(self, method):
diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py
index 0f63b0e05ab3be1a9ebf0f3f939bca95ce5c0501..9010b841549c376d5bcd3c2ee3a666d966c835ac 100644
--- a/lib/spack/spack/hooks/licensing.py
+++ b/lib/spack/spack/hooks/licensing.py
@@ -26,7 +26,7 @@
 
 import spack
 import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, mkdirp
 
 
 def pre_install(pkg):
@@ -154,6 +154,9 @@ def symlink_license(pkg):
     target = pkg.global_license_file
     for filename in pkg.license_files:
         link_name = join_path(pkg.prefix, filename)
+        license_dir = os.path.dirname(link_name)
+        if not os.path.exists(license_dir):
+            mkdirp(license_dir)
         if os.path.exists(target):
             os.symlink(target, link_name)
             tty.msg("Added local symlink %s to global license file" %
diff --git a/lib/spack/spack/operating_systems/__init__.py b/lib/spack/spack/operating_systems/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py
new file mode 100644
index 0000000000000000000000000000000000000000..c160a60be82f3bc35c424f39ff31fcd5e2263c4b
--- /dev/null
+++ b/lib/spack/spack/operating_systems/cnl.py
@@ -0,0 +1,63 @@
+import re
+import os
+
+import llnl.util.tty as tty
+from spack.architecture import OperatingSystem
+from spack.util.executable import *
+import spack.spec
+from spack.util.multiproc import parmap
+import spack.compilers
+
+class Cnl(OperatingSystem):
+    """ Compute Node Linux (CNL) is the operating system used for the Cray XC
+    series super computers. It is a very stripped down version of GNU/Linux.
+    Any compilers found through this operating system will be used with
+    modules. If updated, user must make sure that version and name are 
+    updated to indicate that OS has been upgraded (or downgraded)
+    """
+    def __init__(self):
+        name = 'CNL'
+        version = '10'
+        super(Cnl, self).__init__(name, version)
+
+
+    def find_compilers(self, *paths):
+        types = spack.compilers.all_compiler_types()
+        compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
+
+        # ensure all the version calls we made are cached in the parent
+        # process, as well.  This speeds up Spack a lot.
+        clist = reduce(lambda x,y: x+y, compiler_lists)
+        return clist
+
+
+    def find_compiler(self, cmp_cls, *paths):
+        compilers = []
+        if cmp_cls.PrgEnv:
+            if not cmp_cls.PrgEnv_compiler:
+                tty.die('Must supply PrgEnv_compiler with PrgEnv')
+
+            modulecmd = which('modulecmd')
+            modulecmd.add_default_arg('python')
+
+            # Save the environment variable to restore later
+            old_modulepath = os.environ['MODULEPATH']
+            # if given any explicit paths, search them for module files too
+            if paths:
+                module_paths = ':' + ':'.join(p for p in paths)
+                os.environ['MODULEPATH'] = module_paths
+        
+            output = modulecmd('avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
+            matches = re.findall(r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
+            for name, version in matches:
+                v = version
+                comp = cmp_cls(spack.spec.CompilerSpec(name + '@' + v), self,
+                           ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name +'/' + v])
+
+                compilers.append(comp)
+
+            # Restore modulepath environment variable
+            if paths:
+                os.environ['MODULEPATH'] = old_modulepath
+
+        return compilers
diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e3c72719b21aa9de1ed12eb116132123b7e3597
--- /dev/null
+++ b/lib/spack/spack/operating_systems/linux_distro.py
@@ -0,0 +1,22 @@
+import re
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+class LinuxDistro(OperatingSystem):
+    """ This class will represent the autodetected operating system
+        for a Linux System. Since there are many different flavors of
+        Linux, this class will attempt to encompass them all through
+        autodetection using the python module platform and the method
+        platform.dist()
+    """
+    def __init__(self):
+        distname, version, _ = py_platform.linux_distribution(
+            full_distribution_name=False)
+
+        # Grabs major version from tuple on redhat; on other platforms
+        # grab the first legal identifier in the version field.  On
+        # debian you get things like 'wheezy/sid'; sid means unstable.
+        # We just record 'wheezy' and don't get quite so detailed.
+        version = re.split(r'[^\w-]', version)[0]
+
+        super(LinuxDistro, self).__init__(distname, version)
diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py
new file mode 100644
index 0000000000000000000000000000000000000000..f35b3ca5771ccb1512bae645f39923bff8d6209b
--- /dev/null
+++ b/lib/spack/spack/operating_systems/mac_os.py
@@ -0,0 +1,29 @@
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+class MacOs(OperatingSystem):
+    """This class represents the macOS operating system. This will be
+    auto detected using the python platform.mac_ver. The macOS
+    platform will be represented using the major version operating
+    system name, i.e el capitan, yosemite...etc.
+    """
+
+    def __init__(self):
+        """ Autodetects the mac version from a dictionary. Goes back as
+            far as 10.6 snowleopard. If the user has an older mac then
+            the version will just be a generic mac_os.
+        """
+        mac_releases = {'10.6': "snowleopard",
+                        "10.7": "lion",
+                        "10.8": "mountainlion",
+                        "10.9": "mavericks",
+                        "10.10": "yosemite",
+                        "10.11": "elcapitan",
+                        "10.12": "sierra"}
+
+        mac_ver = '.'.join(py_platform.mac_ver()[0].split('.')[:2])
+        name = mac_releases.get(mac_ver, "macos")
+        super(MacOs, self).__init__(name, mac_ver)
+
+    def __str__(self):
+        return self.name
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index cbf50e56f600145f33c26efff4a699ecb1b61208..c5250d17c0c6d345d169b876bc026e24a94f5f08 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -397,14 +397,20 @@ def __init__(self, spec):
         if self.is_extension:
             spack.repo.get(self.extendee_spec)._check_extendable()
 
+    @property
+    def global_license_dir(self):
+        """Returns the directory where global license files for all
+           packages are stored."""
+        spack_root = ancestor(__file__, 4)
+        return join_path(spack_root, 'etc', 'spack', 'licenses')
+
     @property
     def global_license_file(self):
-        """Returns the path where a global license file should be stored."""
+        """Returns the path where a global license file for this
+           particular package should be stored."""
         if not self.license_files:
             return
-        spack_root = ancestor(__file__, 4)
-        global_license_dir = join_path(spack_root, 'etc', 'spack', 'licenses')
-        return join_path(global_license_dir, self.name,
+        return join_path(self.global_license_dir, self.name,
                          os.path.basename(self.license_files[0]))
 
     @property
@@ -676,11 +682,13 @@ def prefix(self):
         return self.spec.prefix
 
     @property
+    #TODO: Change this to architecture
     def compiler(self):
         """Get the spack.compiler.Compiler object used to build this package"""
         if not self.spec.concrete:
             raise ValueError("Can only get a compiler for a concrete package.")
-        return spack.compilers.compiler_for_spec(self.spec.compiler)
+        return spack.compilers.compiler_for_spec(self.spec.compiler, 
+                self.spec.architecture)
 
     def url_version(self, version):
         """
diff --git a/lib/spack/spack/platforms/__init__.py b/lib/spack/spack/platforms/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/lib/spack/spack/platforms/bgq.py b/lib/spack/spack/platforms/bgq.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0eb76f336351a222a797bd11c4009e3126c6b24
--- /dev/null
+++ b/lib/spack/spack/platforms/bgq.py
@@ -0,0 +1,18 @@
+import os
+from spack.architecture import Platform, Target
+
+class Bgq(Platform):
+    priority    = 30
+    front_end   = 'power7'
+    back_end    = 'powerpc'
+    default     = 'powerpc'
+
+    def __init__(self):
+        super(Bgq, self).__init__('bgq')
+        self.add_target(self.front_end, Target(self.front_end))
+        self.add_target(self.back_end, Target(self.back_end,))
+
+    @classmethod
+    def detect(self):
+        return os.path.exists('/bgsys')
+    
diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py
new file mode 100644
index 0000000000000000000000000000000000000000..e710303e23a32abcd4e987ad0ca8ae095b666dbb
--- /dev/null
+++ b/lib/spack/spack/platforms/cray_xc.py
@@ -0,0 +1,46 @@
+import os
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+
+class CrayXc(Platform):
+    priority    = 20
+    front_end   = 'sandybridge'
+    back_end    = 'ivybridge'
+    default     = 'ivybridge'
+
+    front_os    = "SuSE11"
+    back_os     = "CNL10"
+    default_os  = "CNL10" 
+
+    def __init__(self):
+        ''' Since cori doesn't have ivybridge as a front end it's better
+            if we use CRAY_CPU_TARGET as the default. This will ensure
+            that if we're on a XC-40 or XC-30 then we can detect the target
+        '''
+        super(CrayXc, self).__init__('cray_xc')
+
+        # Handle the default here so we can check for a key error
+        if 'CRAY_CPU_TARGET' in os.environ:
+            self.default = os.environ['CRAY_CPU_TARGET']
+
+        # Change the defaults to haswell if we're on an XC40
+        if self.default == 'haswell':
+            self.front_end = self.default
+            self.back_end = self.default
+
+        # Could switch to use modules and fe targets for front end
+        # Currently using compilers by path for front end.
+        self.add_target('sandybridge', Target('sandybridge'))
+        self.add_target('ivybridge', 
+                        Target('ivybridge', 'craype-ivybridge'))
+        self.add_target('haswell', 
+                        Target('haswell','craype-haswell'))         
+
+        self.add_operating_system('SuSE11', LinuxDistro())
+        self.add_operating_system('CNL10', Cnl())
+
+    @classmethod
+    def detect(self):
+        return os.path.exists('/opt/cray/craype')
+
diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py
new file mode 100644
index 0000000000000000000000000000000000000000..d47dd640f99001a80e56214217da9dfa694e5693
--- /dev/null
+++ b/lib/spack/spack/platforms/darwin.py
@@ -0,0 +1,26 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.mac_os import MacOs
+
+class Darwin(Platform):
+    priority    = 89
+    front_end   = 'x86_64'
+    back_end    = 'x86_64'
+    default     = 'x86_64'
+
+    def __init__(self):
+        super(Darwin, self).__init__('darwin')
+        self.add_target(self.default, Target(self.default))
+        mac_os = MacOs()
+
+        self.default_os = str(mac_os)
+        self.front_os   = str(mac_os)
+        self.back_os    = str(mac_os)
+
+        self.add_operating_system(str(mac_os), mac_os)
+
+    @classmethod
+    def detect(self):
+        platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE)
+        platform, _ = platform.communicate()
+        return 'darwin' in platform.strip().lower()
diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py
new file mode 100644
index 0000000000000000000000000000000000000000..4d8adac384990fe513f0aba2742587062a7bacf0
--- /dev/null
+++ b/lib/spack/spack/platforms/linux.py
@@ -0,0 +1,24 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+
+class Linux(Platform):
+    priority    = 90
+    front_end   = 'x86_64'
+    back_end    = 'x86_64'
+    default     = 'x86_64'
+
+    def __init__(self):
+        super(Linux, self).__init__('linux')
+        self.add_target(self.default, Target(self.default))
+        linux_dist = LinuxDistro()
+        self.default_os = str(linux_dist)
+        self.front_os = self.default_os
+        self.back_os = self.default_os
+        self.add_operating_system(str(linux_dist), linux_dist)
+
+    @classmethod
+    def detect(self):
+        platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE)
+        platform, _ = platform.communicate()
+        return 'linux' in platform.strip().lower()
diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fa2585a7a185df0bb642b64f79cbb805900130c
--- /dev/null
+++ b/lib/spack/spack/platforms/test.py
@@ -0,0 +1,28 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+
+
+class Test(Platform):
+    priority    = 1000000
+    front_end   = 'x86_32'
+    back_end    = 'x86_64'
+    default     = 'x86_64'
+    
+    back_os = 'CNL10'
+    default_os = 'CNL10'
+
+    def __init__(self):
+        super(Test, self).__init__('test')
+        self.add_target(self.default, Target(self.default))
+        self.add_target(self.front_end, Target(self.front_end))
+
+        self.add_operating_system(self.default_os, Cnl())
+        linux_dist = LinuxDistro()
+        self.front_os = linux_dist.name
+        self.add_operating_system(self.front_os, linux_dist)
+
+    @classmethod
+    def detect(self):
+        return True
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 7c09af4c210169b1c9de88c029783a6781d76056..54219ec1b4e73b565ba86604b8ddcf7c0d7bcd28 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1,4 +1,4 @@
-#
+##############################################################################
 # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
 #
@@ -18,10 +18,10 @@
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
 # conditions of the GNU Lesser General Public License for more details.
 #
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
 """
 Spack allows very fine-grained control over how packages are installed and
 over how they are built and configured.  To make this easy, it has its own
@@ -96,8 +96,10 @@
 expansion when it is the first character in an id typed on the command line.
 """
 import sys
+import itertools
 import hashlib
 import base64
+import imp
 from StringIO import StringIO
 from operator import attrgetter
 import yaml
@@ -106,16 +108,22 @@
 import llnl.util.tty as tty
 from llnl.util.lang import *
 from llnl.util.tty.color import *
+from llnl.util.filesystem import join_path
 
 import spack
+import spack.architecture
 import spack.parse
 import spack.error
 import spack.compilers as compilers
 
+# TODO: move display_specs to some other location.
+from spack.cmd.find import display_specs
 from spack.version import *
 from spack.util.string import *
 from spack.util.prefix import Prefix
+from spack.util.naming import mod_to_class
 from spack.virtual import ProviderIndex
+from spack.build_environment import get_path_from_module, load_module
 
 # Valid pattern for an identifier in Spack
 identifier_re = r'\w[\w-]*'
@@ -165,7 +173,6 @@ def colorize_spec(spec):
     """Returns a spec colorized according to the colors specified in
        color_formats."""
     class insert_color:
-
         def __init__(self):
             self.last = None
 
@@ -183,11 +190,9 @@ def __call__(self, match):
 
 @key_ordering
 class CompilerSpec(object):
-
     """The CompilerSpec field represents the compiler or range of compiler
        versions that a package should be built with.  CompilerSpecs have a
        name and a version list. """
-
     def __init__(self, *args):
         nargs = len(args)
         if nargs == 1:
@@ -293,7 +298,6 @@ class VariantSpec(object):
        on the particular package being built, and each named variant can
        be enabled or disabled.
     """
-
     def __init__(self, name, value):
         self.name = name
         self.value = value
@@ -488,7 +492,8 @@ def __init__(self, spec_like, *dep_like, **kwargs):
         self._concrete = kwargs.get('concrete', False)
 
         # Allow a spec to be constructed with an external path.
-        self.external = kwargs.get('external', None)
+        self.external  = kwargs.get('external', None)
+        self.external_module = kwargs.get('external_module', None)
 
         # This allows users to construct a spec DAG with literals.
         # Note that given two specs a and b, Spec(a) copies a, but
@@ -520,8 +525,33 @@ def _add_flag(self, name, value):
         Known flags currently include "arch"
         """
         valid_flags = FlagMap.valid_compiler_flags()
-        if name == 'arch':
-            self._set_architecture(value)
+        if name == 'arch' or name == 'architecture':
+            parts = value.split('-')
+            if len(parts) == 3:
+                platform, op_sys, target = parts
+            else:
+                platform, op_sys, target = None, None, value
+
+            assert(self.architecture.platform is None)
+            assert(self.architecture.platform_os is None)
+            assert(self.architecture.target is None)
+            assert(self.architecture.os_string is None)
+            assert(self.architecture.target_string is None)
+            self._set_platform(platform)
+            self._set_os(op_sys)
+            self._set_target(target)
+        elif name == 'platform':
+            self._set_platform(value)
+        elif name == 'os' or name == 'operating_system':
+            if self.architecture.platform:
+                self._set_os(value)
+            else:
+                self.architecture.os_string = value
+        elif name == 'target':
+            if self.architecture.platform:
+                self._set_target(value)
+            else:
+                self.architecture.target_string = value
         elif name in valid_flags:
             assert(self.compiler_flags is not None)
             self.compiler_flags[name] = value.split()
@@ -535,12 +565,49 @@ def _set_compiler(self, compiler):
                 "Spec for '%s' cannot have two compilers." % self.name)
         self.compiler = compiler
 
-    def _set_architecture(self, architecture):
-        """Called by the parser to set the architecture."""
-        if self.architecture:
-            raise DuplicateArchitectureError(
-                "Spec for '%s' cannot have two architectures." % self.name)
-        self.architecture = architecture
+    def _set_platform(self, value):
+        """Called by the parser to set the architecture platform"""
+        if isinstance(value, basestring):
+            mod_path = spack.platform_path
+            mod_string = 'spack.platforms.'
+            names = list_modules(mod_path)
+            if value in names:
+                # Create a platform object from the name
+                mod_name = mod_string + value
+                path = join_path(mod_path, value) + '.py'
+                mod = imp.load_source(mod_name, path)
+                class_name = mod_to_class(value)
+                if not hasattr(mod, class_name):
+                    tty.die('No class %s defined in %s' % (class_name, mod_name))
+                cls = getattr(mod, class_name)
+                if not inspect.isclass(cls):
+                    tty.die('%s.%s is not a class' % (mod_name, class_name))
+                platform = cls()
+            else:
+                tty.die("No platform class %s defined." % value)
+        else:
+            # The value is a platform
+            platform = value
+
+        self.architecture.platform = platform
+
+        # Set os and target if we previously got strings for them
+        if self.architecture.os_string:
+            self._set_os(self.architecture.os_string)
+            self.architecture.os_string = None
+        if self.architecture.target_string:
+            self._set_target(self.architecture.target_string)
+            self.architecture.target_string = None
+
+    def _set_os(self, value):
+        """Called by the parser to set the architecture operating system"""
+        if self.architecture.platform:
+            self.architecture.platform_os = self.architecture.platform.operating_system(value)
+
+    def _set_target(self, value):
+        """Called by the parser to set the architecture target"""
+        if self.architecture.platform:
+            self.architecture.target = self.architecture.platform.target(value)
 
     def _add_dependency(self, spec):
         """Called by the parser to add another spec as a dependency."""
@@ -612,15 +679,15 @@ def concrete(self):
         if self._concrete:
             return True
 
-        self._concrete = bool(not self.virtual and
-                              self.namespace is not None and
-                              self.versions.concrete and
-                              self.variants.concrete and
-                              self.architecture and
-                              self.compiler and
-                              self.compiler.concrete and
-                              self.compiler_flags.concrete and
-                              self.dependencies.concrete)
+        self._concrete = bool(not self.virtual
+                              and self.namespace is not None
+                              and self.versions.concrete
+                              and self.variants.concrete
+                              and self.architecture
+                              and self.architecture.concrete
+                              and self.compiler and self.compiler.concrete
+                              and self.compiler_flags.concrete
+                              and self.dependencies.concrete)
         return self._concrete
 
     def traverse(self, visited=None, d=0, **kwargs):
@@ -658,7 +725,7 @@ def traverse(self, visited=None, d=0, **kwargs):
                in the traversal.
 
            root     [=True]
-               If false, this won't yield the root node, just its descendents.
+               If False, this won't yield the root node, just its descendents.
 
            direction [=children|parents]
                If 'children', does a traversal of this spec's children.  If
@@ -753,10 +820,10 @@ def to_node_dict(self):
         params.update(dict((name, value)
                       for name, value in self.compiler_flags.items()))
         d = {
-            'parameters': params,
-            'arch': self.architecture,
-            'dependencies': dict((d, self.dependencies[d].dag_hash())
-                                 for d in sorted(self.dependencies)),
+            'parameters' : params,
+            'arch' : self.architecture,
+            'dependencies' : dict((d, self.dependencies[d].dag_hash())
+                                  for d in sorted(self.dependencies))
         }
 
         # Older concrete specs do not have a namespace.  Omit for
@@ -764,6 +831,13 @@ def to_node_dict(self):
         if not self.concrete or self.namespace:
             d['namespace'] = self.namespace
 
+        if self.architecture:
+            # TODO: Fix the target.to_dict to account for the tuple
+            # Want it to be a dict of dicts
+            d['arch'] = self.architecture.to_dict()
+        else:
+            d['arch'] = None
+
         if self.compiler:
             d.update(self.compiler.to_dict())
         else:
@@ -789,11 +863,12 @@ def from_node_dict(node):
         spec = Spec(name)
         spec.namespace = node.get('namespace', None)
         spec.versions = VersionList.from_dict(node)
-        spec.architecture = node['arch']
 
         if 'hash' in node:
             spec._hash = node['hash']
 
+        spec.architecture = spack.architecture.arch_from_dict(node['arch'])
+
         if node['compiler'] is None:
             spec.compiler = None
         else:
@@ -866,12 +941,10 @@ def _concretize_helper(self, presets=None, visited=None):
 
         # Concretize deps first -- this is a bottom-up process.
         for name in sorted(self.dependencies.keys()):
-            changed |= self.dependencies[
-                name]._concretize_helper(presets, visited)
+            changed |= self.dependencies[name]._concretize_helper(presets, visited)
 
         if self.name in presets:
             changed |= self.constrain(presets[self.name])
-
         else:
             # Concretize virtual dependencies last.  Because they're added
             # to presets below, their constraints will all be merged, but we'll
@@ -936,11 +1009,12 @@ def _expand_virtual_packages(self):
         """
         # Make an index of stuff this spec already provides
         self_index = ProviderIndex(self.traverse(), restrict=True)
-
         changed = False
         done = False
+
         while not done:
             done = True
+
             for spec in list(self.traverse()):
                 replacement = None
                 if spec.virtual:
@@ -979,24 +1053,25 @@ def _expand_virtual_packages(self):
                             continue
 
                 # If replacement is external then trim the dependencies
-                if replacement.external:
+                if replacement.external or replacement.external_module:
                     if (spec.dependencies):
                         changed = True
                         spec.dependencies = DependencyMap()
                     replacement.dependencies = DependencyMap()
+                    replacement.architecture = self.architecture
 
                 # TODO: could this and the stuff in _dup be cleaned up?
                 def feq(cfield, sfield):
                     return (not cfield) or (cfield == sfield)
 
-                if replacement is spec or (
-                        feq(replacement.name, spec.name) and
-                        feq(replacement.versions, spec.versions) and
-                        feq(replacement.compiler, spec.compiler) and
-                        feq(replacement.architecture, spec.architecture) and
-                        feq(replacement.dependencies, spec.dependencies) and
-                        feq(replacement.variants, spec.variants) and
-                        feq(replacement.external, spec.external)):
+                if replacement is spec or (feq(replacement.name, spec.name) and
+                    feq(replacement.versions, spec.versions) and
+                    feq(replacement.compiler, spec.compiler) and
+                    feq(replacement.architecture, spec.architecture) and
+                    feq(replacement.dependencies, spec.dependencies) and
+                    feq(replacement.variants, spec.variants) and
+                    feq(replacement.external, spec.external) and
+                    feq(replacement.external_module, spec.external_module)):
                     continue
                 # Refine this spec to the candidate. This uses
                 # replace_with AND dup so that it can work in
@@ -1053,6 +1128,15 @@ def concretize(self):
             if s.namespace is None:
                 s.namespace = spack.repo.repo_for_pkg(s.name).namespace
 
+
+        for s in self.traverse(root=False):
+            if s.external_module:
+                compiler = spack.compilers.compiler_for_spec(s.compiler, s.architecture)
+                for mod in compiler.modules:
+                    load_module(mod)
+
+                s.external = get_path_from_module(s.external_module)
+
         # Mark everything in the spec as concrete, as well.
         self._mark_concrete()
 
@@ -1253,7 +1337,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index):
 
         # if we descend into a virtual spec, there's nothing more
         # to normalize.  Concretize will finish resolving it later.
-        if self.virtual or self.external:
+        if self.virtual or self.external or self.external_module:
             return False
 
         # Combine constraints from package deps with constraints from
@@ -1300,7 +1384,6 @@ def normalize(self, force=False):
 
         # Ensure first that all packages & compilers in the DAG exist.
         self.validate_names()
-
         # Get all the dependencies into one DependencyMap
         spec_deps = self.flat_dependencies(copy=False)
 
@@ -1378,10 +1461,21 @@ def constrain(self, other, deps=True):
                 raise UnsatisfiableVariantSpecError(self.variants[v],
                                                     other.variants[v])
 
+        # TODO: Check out the logic here
         if self.architecture is not None and other.architecture is not None:
-            if self.architecture != other.architecture:
-                raise UnsatisfiableArchitectureSpecError(self.architecture,
-                                                         other.architecture)
+            if self.architecture.platform is not None and other.architecture.platform is not None:
+                if self.architecture.platform != other.architecture.platform:
+                    raise UnsatisfiableArchitectureSpecError(self.architecture,
+                                                             other.architecture)
+            if self.architecture.platform_os is not None and other.architecture.platform_os is not None:
+                if self.architecture.platform_os != other.architecture.platform_os:
+                    raise UnsatisfiableArchitectureSpecError(self.architecture,
+                                                             other.architecture)
+            if self.architecture.target is not None and other.architecture.target is not None:
+                if self.architecture.target != other.architecture.target:
+                    raise UnsatisfiableArchitectureSpecError(self.architecture,
+                                                             other.architecture)
+
 
         changed = False
         if self.compiler is not None and other.compiler is not None:
@@ -1395,9 +1489,17 @@ def constrain(self, other, deps=True):
 
         changed |= self.compiler_flags.constrain(other.compiler_flags)
 
-        old = self.architecture
-        self.architecture = self.architecture or other.architecture
-        changed |= (self.architecture != old)
+        old = str(self.architecture)
+        if self.architecture is None or other.architecture is None:
+            self.architecture = self.architecture or other.architecture
+        else:
+            if self.architecture.platform is None or other.architecture.platform is None:
+                self.architecture.platform = self.architecture.platform or other.architecture.platform
+            if self.architecture.platform_os is None or other.architecture.platform_os is None:
+                self.architecture.platform_os = self.architecture.platform_os or other.architecture.platform_os
+            if self.architecture.target is None or other.architecture.target is None:
+                self.architecture.target = self.architecture.target or other.architecture.target
+        changed |= (str(self.architecture) != old)
 
         if deps:
             changed |= self._constrain_dependencies(other)
@@ -1524,9 +1626,14 @@ def satisfies(self, other, deps=True, strict=False):
         # Architecture satisfaction is currently just string equality.
         # If not strict, None means unconstrained.
         if self.architecture and other.architecture:
-            if self.architecture != other.architecture:
+            if ((self.architecture.platform and other.architecture.platform and self.architecture.platform != other.architecture.platform) or
+                (self.architecture.platform_os and other.architecture.platform_os and self.architecture.platform_os != other.architecture.platform_os) or
+                (self.architecture.target and other.architecture.target and self.architecture.target != other.architecture.target)):
                 return False
-        elif strict and (other.architecture and not self.architecture):
+        elif strict and ((other.architecture and not self.architecture) or
+                         (other.architecture.platform and not self.architecture.platform) or
+                         (other.architecture.platform_os and not self.architecture.platform_os) or
+                         (other.architecture.target and not self.architecture.target)):
             return False
 
         if not self.compiler_flags.satisfies(
@@ -1601,20 +1708,17 @@ def _dup(self, other, **kwargs):
 
            Options:
            dependencies[=True]
-               Whether deps should be copied too.  Set to false to copy a
+               Whether deps should be copied too.  Set to False to copy a
                spec but not its dependencies.
         """
         # We don't count dependencies as changes here
         changed = True
         if hasattr(self, 'name'):
-            changed = (self.name != other.name and
-                       self.versions != other.versions and
-                       self.architecture != other.architecture and
-                       self.compiler != other.compiler and
-                       self.variants != other.variants and
-                       self._normal != other._normal and
-                       self.concrete != other.concrete and
-                       self.external != other.external)
+            changed = (self.name != other.name and self.versions != other.versions and \
+                       self.architecture != other.architecture and self.compiler != other.compiler and \
+                       self.variants != other.variants and self._normal != other._normal and \
+                       self.concrete != other.concrete and self.external != other.external and \
+                       self.external_module != other.external_module and self.compiler_flags != other.compiler_flags)
 
         # Local node attributes get copied first.
         self.name = other.name
@@ -1628,6 +1732,7 @@ def _dup(self, other, **kwargs):
         self.variants = other.variants.copy()
         self.variants.spec = self
         self.external = other.external
+        self.external_module = other.external_module
         self.namespace = other.namespace
         self._hash = other._hash
 
@@ -1648,6 +1753,7 @@ def _dup(self, other, **kwargs):
         self._normal = other._normal
         self._concrete = other._concrete
         self.external = other.external
+        self.external_module = other.external_module
         return changed
 
     def copy(self, **kwargs):
@@ -1752,6 +1858,7 @@ def _cmp_node(self):
                 self.compiler,
                 self.compiler_flags)
 
+
     def eq_node(self, other):
         """Equality with another spec, not including dependencies."""
         return self._cmp_node() == other._cmp_node()
@@ -1862,7 +1969,7 @@ def write(s, c):
                     if self.variants:
                         write(fmt % str(self.variants), c)
                 elif c == '=':
-                    if self.architecture:
+                    if self.architecture and str(self.architecture):
                         write(fmt % (' arch' + c + str(self.architecture)), c)
                 elif c == '#':
                     out.write('-' + fmt % (self.dag_hash(7)))
@@ -1920,8 +2027,8 @@ def write(s, c):
                     if self.variants:
                         write(fmt % str(self.variants), '+')
                 elif named_str == 'ARCHITECTURE':
-                    if self.architecture:
-                        write(fmt % str(self.architecture), '=')
+                    if self.architecture and str(self.architecture):
+                        write(fmt % str(self.architecture), ' arch=')
                 elif named_str == 'SHA1':
                     if self.dependencies:
                         out.write(fmt % str(self.dag_hash(7)))
@@ -1946,6 +2053,41 @@ def write(s, c):
     def dep_string(self):
         return ''.join("^" + dep.format() for dep in self.sorted_deps())
 
+
+    def __cmp__(self, other):
+        # Package name sort order is not configurable; it is always alphabetical.
+        if self.name != other.name:
+            return cmp(self.name, other.name)
+
+        #Package version is second in compare order
+        pkgname = self.name
+        if self.versions != other.versions:
+            return spack.pkgsort.version_compare(pkgname,
+                         self.versions, other.versions)
+
+        #Compiler is third
+        if self.compiler != other.compiler:
+            return spack.pkgsort.compiler_compare(pkgname,
+                         self.compiler, other.compiler)
+
+        #Variants
+        if self.variants != other.variants:
+            return spack.pkgsort.variant_compare(pkgname,
+                         self.variants, other.variants)
+
+        # Architecture (platform, OS, target)
+        if self.architecture != other.architecture:
+            return spack.pkgsort.architecture_compare(pkgname,
+                         self.architecture, other.architecture)
+
+        #Dependency is not configurable
+        if self.dependencies != other.dependencies:
+            return -1 if self.dependencies < other.dependencies else 1
+
+        #Equal specs
+        return 0
+
+
     def __str__(self):
         return self.format() + self.dep_string()
 
@@ -2015,15 +2157,17 @@ def __init__(self):
             (r'\s+', lambda scanner, val: None)])
 
 
+# Lexer is always the same for every parser.
+_lexer = SpecLexer()
+
 class SpecParser(spack.parse.Parser):
 
     def __init__(self):
-        super(SpecParser, self).__init__(SpecLexer())
+        super(SpecParser, self).__init__(_lexer)
         self.previous = None
 
     def do_parse(self):
         specs = []
-
         try:
             while self.next:
                 # TODO: clean this parsing up a bit
@@ -2067,6 +2211,12 @@ def do_parse(self):
         except spack.parse.ParseError, e:
             raise SpecParseError(e)
 
+
+        # Specs with an os or target but no platform get the default platform.
+        for spec in specs:
+            for s in spec.traverse():
+                if s.architecture.os_string or s.architecture.target_string:
+                    s._set_platform(spack.architecture.sys_type())
         return specs
 
     def parse_compiler(self, text):
@@ -2108,9 +2258,10 @@ def spec(self, name, check_valid_token=False):
         spec.name = spec_name
         spec.versions = VersionList()
         spec.variants = VariantMap(spec)
-        spec.architecture = None
+        spec.architecture = spack.architecture.Arch()
         spec.compiler = None
         spec.external = None
+        spec.external_module = None
         spec.compiler_flags = FlagMap(spec)
         spec.dependents = DependencyMap()
         spec.dependencies = DependencyMap()
@@ -2186,12 +2337,6 @@ def variant(self, name=None):
             self.check_identifier()
             return self.token.value
 
-    def architecture(self):
-        # TODO: Make this work properly as a subcase of variant (includes
-        # adding names to grammar)
-        self.expect(ID)
-        return self.token.value
-
     def version(self):
         start = None
         end = None
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 1668e271fafedb6eb5e92b74598dda27e0f1f23c..fb91f24721405cc42d966123898745bd383957d6 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -31,15 +31,16 @@
 from llnl.util.tty.colify import colify
 from spack.test.tally_plugin import Tally
 """Names of tests to be included in Spack's test suite"""
-test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
+
+test_names = ['architecture', 'versions', 'url_parse', 'url_substitution', 'packages', 'stage',
               'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
               'multimethod', 'install', 'package_sanity', 'config',
               'directory_layout', 'pattern', 'python_version', 'git_fetch',
               'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
               'cc', 'link_tree', 'spec_yaml', 'optional_deps',
               'make_executable', 'configure_guess', 'lock', 'database',
-              'namespace_trie', 'yaml', 'sbang', 'environment',
-              'cmd.uninstall', 'cmd.test_install']
+              'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
+              'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd']
 
 
 def list_tests():
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6847c57445d143bc471a614db09807ad652b172
--- /dev/null
+++ b/lib/spack/spack/test/architecture.py
@@ -0,0 +1,113 @@
+""" Test that the Arch class is created/serialized correctly and that the
+    platform detection functions return the expected OS and target.
+"""
+import itertools
+import unittest
+import os
+import platform as py_platform
+import spack
+from spack.architecture import *
+from spack.spec import *
+from spack.platforms.cray_xc import CrayXc
+from spack.platforms.linux import Linux
+from spack.platforms.bgq import Bgq
+from spack.platforms.darwin import Darwin
+
+from spack.test.mock_packages_test import *
+
+
+class ArchitectureTest(MockPackagesTest):
+
+    def setUp(self):
+        super(ArchitectureTest, self).setUp()
+        self.platform = sys_type()
+
+    def tearDown(self):
+        super(ArchitectureTest, self).tearDown()
+
+    def test_dict_functions_for_architecture(self):
+        arch = Arch()
+        arch.platform = spack.architecture.sys_type()
+        arch.platform_os = arch.platform.operating_system('default_os')
+        arch.target = arch.platform.target('default_target')
+
+        d = arch.to_dict()
+
+        new_arch = spack.architecture.arch_from_dict(d)
+
+        self.assertEqual(arch, new_arch)
+
+        self.assertTrue(isinstance(arch, Arch))
+        self.assertTrue(isinstance(arch.platform, Platform))
+        self.assertTrue(isinstance(arch.platform_os, OperatingSystem))
+        self.assertTrue(isinstance(arch.target, Target))
+        self.assertTrue(isinstance(new_arch, Arch))
+        self.assertTrue(isinstance(new_arch.platform, Platform))
+        self.assertTrue(isinstance(new_arch.platform_os, OperatingSystem))
+        self.assertTrue(isinstance(new_arch.target, Target))
+
+
+    def test_sys_type(self):
+        output_platform_class = sys_type()
+        my_platform_class = None
+        if os.path.exists('/opt/cray/craype'):
+            my_platform_class = CrayXc()
+        elif os.path.exists('/bgsys'):
+            my_platform_class = Bgq()
+        elif 'Linux' in py_platform.system():
+            my_platform_class = Linux()
+        elif 'Darwin' in py_platform.system():
+            my_platform_class = Darwin()
+
+        self.assertEqual(str(output_platform_class), str(my_platform_class))
+
+    def test_user_front_end_input(self):
+        """When the user specifies only the frontend, both the frontend
+            target and the frontend operating system should match.
+        """
+        frontend_os = self.platform.operating_system("frontend")
+        frontend_target = self.platform.target("frontend")
+        frontend_spec = Spec("libelf os=frontend target=frontend")
+        frontend_spec.concretize()
+        self.assertEqual(frontend_os, frontend_spec.architecture.platform_os)
+        self.assertEqual(frontend_target, frontend_spec.architecture.target)
+
+    def test_user_back_end_input(self):
+        """When the user specifies only the backend, both the backend
+            target and the backend operating system should match.
+        """
+        backend_os = self.platform.operating_system("backend")
+        backend_target = self.platform.target("backend")
+        backend_spec = Spec("libelf os=backend target=backend")
+        backend_spec.concretize()
+        self.assertEqual(backend_os, backend_spec.architecture.platform_os)
+        self.assertEqual(backend_target, backend_spec.architecture.target)
+
+    def test_user_defaults(self):
+        default_os = self.platform.operating_system("default_os")
+        default_target = self.platform.target("default_target")
+
+        default_spec = Spec("libelf")  # default is no args
+        default_spec.concretize()
+        self.assertEqual(default_os, default_spec.architecture.platform_os)
+        self.assertEqual(default_target, default_spec.architecture.target)
+
+    def test_user_input_combination(self):
+        os_list = self.platform.operating_sys.keys()
+        target_list = self.platform.targets.keys()
+        additional = ["fe", "be", "frontend", "backend"]
+
+        os_list.extend(additional)
+        target_list.extend(additional)
+
+        combinations = itertools.product(os_list, target_list)
+        results = []
+        for arch in combinations:
+            o, t = arch
+            spec = Spec("libelf os=%s target=%s" % (o, t))
+            spec.concretize()
+            results.append(spec.architecture.platform_os == self.platform.operating_system(o))
+            results.append(spec.architecture.target == self.platform.target(t))
+        res = all(results)
+
+        self.assertTrue(res)
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
new file mode 100644
index 0000000000000000000000000000000000000000..371e9650e04700c6135a4323cc81f67ca1221208
--- /dev/null
+++ b/lib/spack/spack/test/cmd/find.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+
+import spack.cmd.find
+import unittest
+
+
+class Bunch(object):
+
+    def __init__(self, **kwargs):
+        self.__dict__.update(kwargs)
+
+
+class FindTest(unittest.TestCase):
+
+    def test_query_arguments(self):
+        query_arguments = spack.cmd.find.query_arguments
+        # Default arguments
+        args = Bunch(only_missing=False, missing=False,
+                     unknown=False, explicit=False, implicit=False)
+        q_args = query_arguments(args)
+        self.assertTrue('installed' in q_args)
+        self.assertTrue('known' in q_args)
+        self.assertTrue('explicit' in q_args)
+        self.assertEqual(q_args['installed'], True)
+        self.assertEqual(q_args['known'], any)
+        self.assertEqual(q_args['explicit'], any)
+        # Check that explicit works correctly
+        args.explicit = True
+        q_args = query_arguments(args)
+        self.assertEqual(q_args['explicit'], True)
+        args.explicit = False
+        args.implicit = True
+        q_args = query_arguments(args)
+        self.assertEqual(q_args['explicit'], False)
+        args.explicit = True
+        self.assertRaises(SystemExit, query_arguments, args)
diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py
new file mode 100644
index 0000000000000000000000000000000000000000..d89814154b6ab1f0d3c5e3569b0de0d87ca7de02
--- /dev/null
+++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py
@@ -0,0 +1,81 @@
+import os
+import shutil
+from tempfile import mkdtemp
+
+from llnl.util.filesystem import set_executable, mkdirp
+
+import spack.spec
+import spack.cmd.compiler
+import spack.compilers
+from spack.version import Version
+from spack.test.mock_packages_test import *
+
+test_version = '4.5-spacktest'
+
+class MockArgs(object):
+    def __init__(self, add_paths=[], scope=None, compiler_spec=None, all=None):
+        self.add_paths = add_paths
+        self.scope = scope
+        self.compiler_spec = compiler_spec
+        self.all = all
+
+
+def make_mock_compiler():
+    """Make a directory containing a fake, but detectable compiler."""
+    mock_compiler_dir = mkdtemp()
+    bin_dir = os.path.join(mock_compiler_dir, 'bin')
+    mkdirp(bin_dir)
+
+    gcc_path = os.path.join(bin_dir, 'gcc')
+    gxx_path = os.path.join(bin_dir, 'g++')
+    gfortran_path = os.path.join(bin_dir, 'gfortran')
+
+    with open(gcc_path, 'w') as f:
+        f.write("""\
+#!/bin/sh
+
+for arg in "$@"; do
+    if [ "$arg" = -dumpversion ]; then
+        echo '%s'
+    fi
+done
+""" % test_version)
+
+    # Create some mock compilers in the temporary directory
+    set_executable(gcc_path)
+    shutil.copy(gcc_path, gxx_path)
+    shutil.copy(gcc_path, gfortran_path)
+
+    return mock_compiler_dir
+
+
+class CompilerCmdTest(MockPackagesTest):
+    """ Test compiler commands for add and remove """
+
+
+    def test_compiler_remove(self):
+        args = MockArgs(all=True, compiler_spec='gcc@4.5.0')
+        spack.cmd.compiler.compiler_remove(args)
+        compilers = spack.compilers.all_compilers()
+        self.assertTrue(spack.spec.CompilerSpec("gcc@4.5.0") not in compilers)
+
+
+    def test_compiler_add(self):
+        # compilers available by default.
+        old_compilers = set(spack.compilers.all_compilers())
+
+        # add our new compiler and find again.
+        compiler_dir = make_mock_compiler()
+
+        try:
+            args = MockArgs(add_paths=[compiler_dir])
+            spack.cmd.compiler.compiler_find(args)
+
+            # ensure new compiler is in there
+            new_compilers = set(spack.compilers.all_compilers())
+            new_compiler = new_compilers - old_compilers
+            self.assertTrue(new_compiler)
+            self.assertTrue(new_compiler.pop().version == Version(test_version))
+
+        finally:
+            shutil.rmtree(compiler_dir, ignore_errors=True)
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 963481054e5c6d2bd3af1dfba02092cb3581a66a..ab201f406aa294b0286c791b12b9b9516716e620 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -23,6 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import spack
+import spack.architecture
 from spack.spec import Spec, CompilerSpec
 from spack.version import ver
 from spack.concretize import find_spec
@@ -253,6 +254,19 @@ def test_external_package(self):
         self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
 
 
+    def test_external_package_module(self):
+        # Skip: no TCL modules are available on darwin/linux machines.
+        # TODO: find an improved way to check for this.
+        if (spack.architecture.sys_type().name == 'darwin' or
+            spack.architecture.sys_type().name == 'linux'):
+            return
+
+        spec = Spec('externalmodule')
+        spec.concretize()
+        self.assertEqual(spec['externalmodule'].external_module, 'external-module')
+        self.assertFalse('externalprereq' in spec)
+        self.assertTrue(spec['externalmodule'].compiler.satisfies('gcc'))
+
     def test_nobuild_package(self):
         got_error = False
         spec = Spec('externaltool%clang')
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index eff482f4c6ef1efb35839700ed007ec9f9b1bf1a..252d77e66be0805263c82ce556cfc68e33ac0d2c 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -32,45 +32,75 @@
 from spack.test.mock_packages_test import *
 
 # Some sample compiler config data
-a_comps =  {
-    "x86_64_E5v2_IntelIB": {
-        "gcc@4.7.3" : {
+a_comps =  [
+    {'compiler': {
+        'paths': {
             "cc" : "/gcc473",
             "cxx": "/g++473",
             "f77": None,
-            "fc" : None },
-        "gcc@4.5.0" : {
+            "fc" : None 
+            },
+        'modules': None,
+        'spec': 'gcc@4.7.3',
+        'operating_system': 'CNL10'
+        }},
+    {'compiler': {
+        'paths': {
             "cc" : "/gcc450",
             "cxx": "/g++450",
-            "f77": "/gfortran",
-            "fc" : "/gfortran" },
-        "clang@3.3"  : {
+            "f77": 'gfortran',
+            "fc" : 'gfortran'
+            },
+        'modules': None,
+        'spec': 'gcc@4.5.0',
+        'operating_system': 'CNL10'
+        }},
+    {'compiler': {
+        'paths': {
             "cc" : "<overwritten>",
             "cxx": "<overwritten>",
-            "f77": "<overwritten>",
-            "fc" : "<overwritten>" }
-    }
-}
-
-b_comps = {
-    "x86_64_E5v3": {
-        "icc@10.0" : {
+            "f77": '<overwritten>',
+            "fc" : '<overwritten>' },
+        'modules': None,
+        'spec': 'clang@3.3',
+        'operating_system': 'CNL10'
+        }}
+]
+
+b_comps = [
+    {'compiler': {
+        'paths': {
             "cc" : "/icc100",
-            "cxx": "/icc100",
+            "cxx": "/icp100",
             "f77": None,
-            "fc" : None },
-        "icc@11.1" : {
+            "fc" : None
+            },
+        'modules': None,
+        'spec': 'icc@10.0',
+        'operating_system': 'CNL10'
+        }},
+    {'compiler': {
+        'paths': {
             "cc" : "/icc111",
             "cxx": "/icp111",
-            "f77": "/ifort",
-            "fc" : "/ifort" },
-        "clang@3.3" : {
-            "cc" : "/clang",
-            "cxx": "/clang++",
-            "f77": None,
-            "fc" : None}
-    }
-}
+            "f77": 'ifort',
+            "fc" : 'ifort'
+            },
+        'modules': None,
+        'spec': 'icc@11.1',
+        'operating_system': 'CNL10'
+        }},
+    {'compiler': {
+        'paths': {
+            "cc" : "<overwritten>",
+            "cxx": "<overwritten>",
+            "f77": '<overwritten>',
+            "fc" : '<overwritten>' },
+        'modules': None,
+        'spec': 'clang@3.3',
+        'operating_system': 'CNL10'
+        }}
+]
 
 # Some Sample repo data
 repos_low = [ "/some/path" ]
@@ -89,15 +119,28 @@ def tearDown(self):
         super(ConfigTest, self).tearDown()
         shutil.rmtree(self.tmp_dir, True)
 
-    def check_config(self, comps, arch, *compiler_names):
+
+    def check_config(self, comps, *compiler_names):
         """Check that named compilers in comps match Spack's config."""
         config = spack.config.get_config('compilers')
         compiler_list = ['cc', 'cxx', 'f77', 'fc']
-        for key in compiler_names:
-            for c in compiler_list:
-                expected = comps[arch][key][c]
-                actual = config[arch][key][c]
-                self.assertEqual(expected, actual)
+        param_list = ['modules', 'paths', 'spec', 'operating_system']
+        for compiler in config:
+            conf = compiler['compiler']
+            if conf['spec'] in compiler_names:
+                comp = None
+                for c in comps:
+                    if c['compiler']['spec'] == conf['spec']:
+                        comp = c['compiler']
+                        break
+                if not comp:
+                    self.fail('Bad config spec')
+                for p in param_list:
+                    self.assertEqual(conf[p], comp[p])
+                for c in compiler_list:
+                    expected = comp['paths'][c]
+                    actual = conf['paths'][c]
+                    self.assertEqual(expected, actual)
 
     def test_write_list_in_memory(self):
         spack.config.update_config('repos', repos_low, 'test_low_priority')
@@ -111,8 +154,9 @@ def test_write_key_in_memory(self):
         spack.config.update_config('compilers', b_comps, 'test_high_priority')
 
         # Make sure the config looks how we expect.
-        self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
-        self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
+
 
     def test_write_key_to_disk(self):
         # Write b_comps "on top of" a_comps.
@@ -123,8 +167,8 @@ def test_write_key_to_disk(self):
         spack.config.clear_config_caches()
 
         # Same check again, to ensure consistency.
-        self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
-        self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
 
     def test_write_to_same_priority_file(self):
         # Write b_comps in the same file as a_comps.
@@ -135,5 +179,5 @@ def test_write_to_same_priority_file(self):
         spack.config.clear_config_caches()
 
         # Same check again, to ensure consistency.
-        self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
-        self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py
index ded1539e18e9f573d402fe0c7cded86f4d27f89c..a0d959db2f173194adea5b1d7b60e3f038b370f1 100644
--- a/lib/spack/spack/test/environment.py
+++ b/lib/spack/spack/test/environment.py
@@ -24,17 +24,20 @@
 ##############################################################################
 import unittest
 import os
+import copy
 from spack.environment import EnvironmentModifications
 
 
 class EnvironmentTest(unittest.TestCase):
     def setUp(self):
-        os.environ.clear()
         os.environ['UNSET_ME'] = 'foo'
         os.environ['EMPTY_PATH_LIST'] = ''
         os.environ['PATH_LIST'] = '/path/second:/path/third'
         os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
 
+    def tearDown(self):
+        pass
+
     def test_set(self):
         env = EnvironmentModifications()
         env.set('A', 'dummy value')
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index 595667bf35c3ac66c41d1e6cf1ec87ae5a6dc90f..a56bd8ebdc935695c96bf2f08464b8210b17d61b 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -34,20 +34,127 @@
 from spack.repository import RepoPath
 from spack.spec import Spec
 
+platform = spack.architecture.sys_type()
+
+linux_os_name = 'debian'
+linux_os_version = '6'
+
+if platform.name == 'linux':
+    linux_os = platform.operating_system("default_os")
+    linux_os_name = linux_os.name
+    linux_os_version = linux_os.version
+
 mock_compiler_config = """\
 compilers:
-  all:
-    clang@3.3:
+- compiler:
+    spec: clang@3.3
+    operating_system: {0}{1}
+    paths:
       cc: /path/to/clang
       cxx: /path/to/clang++
       f77: None
       fc: None
-    gcc@4.5.0:
+    modules: 'None'
+- compiler:
+    spec: gcc@4.5.0  
+    operating_system: {0}{1}
+    paths:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: None
+      fc: None
+    modules: 'None'
+- compiler:
+    spec: clang@3.3
+    operating_system: CNL10
+    paths:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    modules: 'None'
+- compiler:
+    spec: clang@3.3
+    operating_system: SuSE11
+    paths:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    modules: 'None'
+- compiler:
+    spec: clang@3.3
+    operating_system: redhat6
+    paths:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    modules: 'None'
+- compiler:
+    spec: clang@3.3
+    operating_system: yosemite
+    paths:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    modules: 'None'
+- compiler:
+    paths:
       cc: /path/to/gcc
       cxx: /path/to/g++
       f77: /path/to/gfortran
       fc: /path/to/gfortran
-"""
+    operating_system: CNL10
+    spec: gcc@4.5.0
+    modules: 'None'
+- compiler:
+    paths:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
+    operating_system: SuSE11
+    spec: gcc@4.5.0
+    modules: 'None'
+- compiler:
+    paths:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
+    operating_system: redhat6
+    spec: gcc@4.5.0
+    modules: 'None'
+- compiler:
+    paths:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
+    operating_system: yosemite
+    spec: gcc@4.5.0
+    modules: 'None'
+- compiler:
+    paths:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
+    operating_system: elcapitan
+    spec: gcc@4.5.0
+    modules: 'None' 
+- compiler:
+    spec: clang@3.3
+    operating_system: elcapitan
+    paths:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+    modules: 'None'
+""".format(linux_os_name, linux_os_version)
 
 mock_packages_config = """\
 packages:
@@ -60,6 +167,10 @@
     paths:
       externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang
       externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc
+  externalmodule:
+    buildable: False
+    modules:
+      externalmodule@1.0%gcc@4.5.0: external-module
 """
 
 class MockPackagesTest(unittest.TestCase):
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index c73badf8f24d70d370bf17a9c2369a077fd0e6c8..582e067860f242f2571d2d12fb96645243b00d38 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -73,7 +73,7 @@ def mock_open(filename, mode):
         'all': {
             'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
         },
-        'arch=x86-linux': {
+        'platform=test target=x86_64': {
             'environment': {'set': {'FOO': 'foo'},
                             'unset': ['BAR']}
         }
@@ -116,6 +116,7 @@ def tearDown(self):
 
     def get_modulefile_content(self, spec):
         spec.concretize()
+
         generator = spack.modules.TclModule(spec)
         generator.write()
         content = FILE_REGISTRY[generator.file_name].split('\n')
@@ -123,27 +124,28 @@ def get_modulefile_content(self, spec):
 
     def test_simple_case(self):
         spack.modules.CONFIGURATION = configuration_autoload_direct
-        spec = spack.spec.Spec('mpich@3.0.4 arch=x86-linux')
+        spec = spack.spec.Spec('mpich@3.0.4')
         content = self.get_modulefile_content(spec)
         self.assertTrue('module-whatis "mpich @3.0.4"' in content)
 
     def test_autoload(self):
         spack.modules.CONFIGURATION = configuration_autoload_direct
-        spec = spack.spec.Spec('mpileaks arch=x86-linux')
+        spec = spack.spec.Spec('mpileaks')
         content = self.get_modulefile_content(spec)
         self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
         self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
 
         spack.modules.CONFIGURATION = configuration_autoload_all
-        spec = spack.spec.Spec('mpileaks arch=x86-linux')
+        spec = spack.spec.Spec('mpileaks')
         content = self.get_modulefile_content(spec)
         self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
         self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
 
     def test_alter_environment(self):
         spack.modules.CONFIGURATION = configuration_alter_environment
-        spec = spack.spec.Spec('mpileaks arch=x86-linux')
+        spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
         content = self.get_modulefile_content(spec)
+
         self.assertEqual(
             len([x
                  for x in content
@@ -152,8 +154,9 @@ def test_alter_environment(self):
             len([x for x in content if 'setenv FOO "foo"' in x]), 1)
         self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
 
-        spec = spack.spec.Spec('libdwarf arch=x64-linux')
+        spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
         content = self.get_modulefile_content(spec)
+
         self.assertEqual(
             len([x
                  for x in content
@@ -164,14 +167,14 @@ def test_alter_environment(self):
 
     def test_blacklist(self):
         spack.modules.CONFIGURATION = configuration_blacklist
-        spec = spack.spec.Spec('mpileaks arch=x86-linux')
+        spec = spack.spec.Spec('mpileaks')
         content = self.get_modulefile_content(spec)
         self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
         self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
 
     def test_conflicts(self):
         spack.modules.CONFIGURATION = configuration_conflicts
-        spec = spack.spec.Spec('mpileaks arch=x86-linux')
+        spec = spack.spec.Spec('mpileaks')
         content = self.get_modulefile_content(spec)
         self.assertEqual(
             len([x for x in content if x.startswith('conflict')]), 2)
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index a33656adccb6a09359005ae71b349a25fbf95b0a..034e6b3923c1040913738fc7cbf192c5cb6b4211 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -92,21 +92,18 @@ def test_default_works(self):
         self.assertEqual(pkg.has_a_default(), 'default')
 
 
-    def test_architecture_match(self):
-        pkg = spack.repo.get('multimethod arch=x86_64')
-        self.assertEqual(pkg.different_by_architecture(), 'x86_64')
-
-        pkg = spack.repo.get('multimethod arch=ppc64')
-        self.assertEqual(pkg.different_by_architecture(), 'ppc64')
-
-        pkg = spack.repo.get('multimethod arch=ppc32')
-        self.assertEqual(pkg.different_by_architecture(), 'ppc32')
-
-        pkg = spack.repo.get('multimethod arch=arm64')
-        self.assertEqual(pkg.different_by_architecture(), 'arm64')
-
-        pkg = spack.repo.get('multimethod arch=macos')
-        self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
+    def test_target_match(self):
+        platform = spack.architecture.sys_type()
+        targets = platform.targets.values()
+        for target in targets[:-1]:
+            pkg = spack.repo.get('multimethod target='+target.name)
+            self.assertEqual(pkg.different_by_target(), target.name)
+
+        pkg = spack.repo.get('multimethod target='+targets[-1].name)
+        if len(targets) == 1:
+            self.assertEqual(pkg.different_by_target(), targets[-1].name)
+        else:
+            self.assertRaises(NoSuchMethodError, pkg.different_by_target)
 
 
     def test_dependency_match(self):
diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed5f6ff8adc079ae3b3d98e669ee386444fbd660
--- /dev/null
+++ b/lib/spack/spack/test/operating_system.py
@@ -0,0 +1,55 @@
+""" Test checks if the operating_system class is created correctly and that
+the functions are using the correct operating_system. Also checks whether
+the operating_system correctly uses the compiler_strategy
+"""
+
+import unittest
+import os
+import platform
+from spack.platforms.cray_xc import CrayXc
+from spack.platforms.linux import Linux
+from spack.platforms.darwin import Darwin
+from spack.operating_system.linux_distro import LinuxDistro
+from spack.operating_system.mac_os import MacOs
+from spack.operating_system.cnl import ComputeNodeLinux
+
+class TestOperatingSystem(unittest.TestCase):
+
+    def setUp(self):
+        cray_xc = CrayXc()
+        linux   = Linux()
+        darwin  = Darwin()
+        self.cray_operating_sys = cray_xc.operating_system('front_os')
+        self.cray_default_os = cray_xc.operating_system('default_os')
+        self.cray_back_os = cray_xc.operating_system('back_os')
+        self.darwin_operating_sys = darwin.operating_system('default_os')
+        self.linux_operating_sys  = linux.operating_system('default_os')
+
+    def test_cray_front_end_operating_system(self):
+        self.assertIsInstance(self.cray_operating_sys, LinuxDistro)
+
+    def test_cray_front_end_compiler_strategy(self):
+        self.assertEqual(self.cray_operating_sys.compiler_strategy, "PATH")
+
+    def test_cray_back_end_operating_system(self):
+        self.assertIsInstance(self.cray_back_os, ComputeNodeLinux)
+
+    def test_cray_back_end_compiler_strategy(self):
+        self.assertEqual(self.cray_back_os.compiler_strategy, "MODULES")
+
+    def test_linux_operating_system(self):
+        self.assertIsInstance(self.linux_operating_sys, LinuxDistro)
+
+    def test_linux_compiler_strategy(self):
+        self.assertEqual(self.linux_operating_sys.compiler_strategy, "PATH")
+
+
+    def test_cray_front_end_compiler_list(self):
+        """ Operating systems will now be in charge of finding compilers.
+            So, depending on which operating system you want to build for
+            or which operating system you are on, then you could detect
+            compilers in a certain way. Cray linux environment on the front
+            end is just a regular linux distro whereas the Cray linux compute
+            node is a stripped down version which modules are important
+        """
+        self.skipTest("TODO: OS-driven compiler detection not implemented yet")
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 52f4f7395e055ce6de03291078ae0a8aaafbd5a7..712f07ac4d3c029b767a6c06621732fe668538f7 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -29,6 +29,7 @@
     spack/lib/spack/spack/test/mock_packages
 """
 import spack
+import spack.architecture
 import spack.package
 
 from llnl.util.lang import list_modules
@@ -241,8 +242,10 @@ def test_unsatisfiable_compiler_version(self):
 
 
     def test_unsatisfiable_architecture(self):
-        self.set_pkg_dep('mpileaks', 'mpich arch=bgqos_0')
-        spec = Spec('mpileaks ^mpich arch=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
+        platform = spack.architecture.sys_type()
+
+        self.set_pkg_dep('mpileaks', 'mpich platform=test target=be')
+        spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath ^dyninst ^libelf ^libdwarf')
         self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
 
 
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 0cb78b90edef26dc7a563020e79223259f6f2204..9876bfd5a8088cec1eefc96f53622898c2f17eb4 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -23,6 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import unittest
+import spack.architecture
 from spack.spec import *
 from spack.test.mock_packages_test import *
 
@@ -107,7 +108,8 @@ def test_satisfies_namespace(self):
 
 
     def test_satisfies_namespaced_dep(self):
-        """Ensure spec from same or unspecified namespace satisfies namespace constraint."""
+        """Ensure spec from same or unspecified namespace satisfies namespace
+           constraint."""
         self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
 
         self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
@@ -139,11 +141,16 @@ def test_satisfies_compiler_version(self):
 
 
     def test_satisfies_architecture(self):
-        self.check_satisfies('foo arch=chaos_5_x86_64_ib', ' arch=chaos_5_x86_64_ib')
-        self.check_satisfies('foo arch=bgqos_0', ' arch=bgqos_0')
-
-        self.check_unsatisfiable('foo arch=bgqos_0', ' arch=chaos_5_x86_64_ib')
-        self.check_unsatisfiable('foo arch=chaos_5_x86_64_ib', ' arch=bgqos_0')
+        platform = spack.architecture.sys_type()
+        self.check_satisfies(
+            'foo platform=test target=frontend os=frontend',
+            'platform=test target=frontend os=frontend')
+        self.check_satisfies(
+            'foo platform=test target=backend os=backend',
+            'platform=test target=backend', 'platform=test os=backend')
+        self.check_satisfies(
+            'foo platform=test target=default_target os=default_os',
+            'platform=test target=default_target os=default_os')
 
 
     def test_satisfies_dependencies(self):
@@ -158,10 +165,14 @@ def test_satisfies_dependency_versions(self):
         self.check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3')
         self.check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0')
 
-        self.check_satisfies('mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
-        self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
-        self.check_unsatisfiable('mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
-        self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+        self.check_satisfies(
+            'mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+        self.check_unsatisfiable(
+            'mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+        self.check_unsatisfiable(
+            'mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+        self.check_unsatisfiable(
+            'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
 
 
     def test_satisfies_virtual_dependencies(self):
@@ -350,10 +361,13 @@ def test_constrain_compiler_flags(self):
         self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"')
 
 
-    def test_constrain_arch(self):
-        self.check_constrain('libelf arch=bgqos_0', 'libelf arch=bgqos_0', 'libelf arch=bgqos_0')
-        self.check_constrain('libelf arch=bgqos_0', 'libelf', 'libelf arch=bgqos_0')
-
+    def test_constrain_architecture(self):
+        self.check_constrain('libelf target=default_target os=default_os',
+                             'libelf target=default_target os=default_os',
+                             'libelf target=default_target os=default_os')
+        self.check_constrain('libelf target=default_target os=default_os',
+                             'libelf',
+                             'libelf target=default_target os=default_os')
 
     def test_constrain_compiler(self):
         self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7')
@@ -369,9 +383,8 @@ def test_invalid_constraint(self):
         self.check_invalid_constraint('libelf debug=2', 'libelf debug=1')
 
         self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"')
-
-        self.check_invalid_constraint('libelf arch=bgqos_0', 'libelf arch=x86_54')
-
+        self.check_invalid_constraint('libelf platform=test target=be os=be',
+                                          'libelf target=fe os=fe')
 
     def test_constrain_changed(self):
         self.check_constrain_changed('libelf', '@1.0')
@@ -382,7 +395,10 @@ def test_constrain_changed(self):
         self.check_constrain_changed('libelf', '~debug')
         self.check_constrain_changed('libelf', 'debug=2')
         self.check_constrain_changed('libelf', 'cppflags="-O3"')
-        self.check_constrain_changed('libelf', ' arch=bgqos_0')
+
+        platform = spack.architecture.sys_type()
+        self.check_constrain_changed('libelf', 'target='+platform.target('default_target').name)
+        self.check_constrain_changed('libelf', 'os='+platform.operating_system('default_os').name)
 
 
     def test_constrain_not_changed(self):
@@ -395,9 +411,10 @@ def test_constrain_not_changed(self):
         self.check_constrain_not_changed('libelf~debug', '~debug')
         self.check_constrain_not_changed('libelf debug=2', 'debug=2')
         self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"')
-        self.check_constrain_not_changed('libelf arch=bgqos_0', ' arch=bgqos_0')
-        self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
-        self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')
+
+        platform = spack.architecture.sys_type()
+        default_target = platform.target('default_target').name
+        self.check_constrain_not_changed('libelf target='+default_target, 'target='+default_target)
 
 
     def test_constrain_dependency_changed(self):
@@ -407,8 +424,10 @@ def test_constrain_dependency_changed(self):
         self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
         self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
         self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
-        self.check_constrain_changed('libelf^foo', 'libelf^foo cppflags="-O3"')
-        self.check_constrain_changed('libelf^foo', 'libelf^foo arch=bgqos_0')
+
+        platform = spack.architecture.sys_type()
+        default_target = platform.target('default_target').name
+        self.check_constrain_changed('libelf^foo', 'libelf^foo target='+default_target)
 
 
     def test_constrain_dependency_not_changed(self):
@@ -419,5 +438,7 @@ def test_constrain_dependency_not_changed(self):
         self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
         self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
         self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
-        self.check_constrain_not_changed('libelf^foo arch=bgqos_0', 'libelf^foo arch=bgqos_0')
 
+        platform = spack.architecture.sys_type()
+        default_target = platform.target('default_target').name
+        self.check_constrain_not_changed('libelf^foo target='+default_target, 'libelf^foo target='+default_target)
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index c4e4c9cdfe5141ec50078f0b709039054305bf90..4a534d7b5ceb1f6d4473ef8e58ec830a7cd12d02 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -58,7 +58,7 @@ class SpecSyntaxTest(unittest.TestCase):
     # ================================================================================
     # Parse checks
     # ================================================================================
-    def check_parse(self, expected, spec=None):
+    def check_parse(self, expected, spec=None, remove_arch=True):
         """Assert that the provided spec is able to be parsed.
            If this is called with one argument, it assumes that the string is
            canonical (i.e., no spaces and ~ instead of - for variants) and that it
@@ -70,6 +70,7 @@ def check_parse(self, expected, spec=None):
         if spec is None:
             spec = expected
         output = spack.spec.parse(spec)
+
         parsed = (" ".join(str(spec) for spec in output))
         self.assertEqual(expected, parsed)
 
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index a026403e2ef47940889c292f82170ec21185a530..4624f901c8ba8ad71f68c56606fa7624a01565e7 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -43,7 +43,6 @@ def assert_ver_lt(self, a, b):
         self.assertFalse(a > b)
         self.assertFalse(a >= b)
 
-
     def assert_ver_gt(self, a, b):
         a, b = ver(a), ver(b)
         self.assertTrue(a > b)
@@ -53,7 +52,6 @@ def assert_ver_gt(self, a, b):
         self.assertFalse(a < b)
         self.assertFalse(a <= b)
 
-
     def assert_ver_eq(self, a, b):
         a, b = ver(a), ver(b)
         self.assertFalse(a > b)
@@ -63,55 +61,43 @@ def assert_ver_eq(self, a, b):
         self.assertFalse(a < b)
         self.assertTrue(a <= b)
 
-
     def assert_in(self, needle, haystack):
         self.assertTrue(ver(needle) in ver(haystack))
 
-
     def assert_not_in(self, needle, haystack):
         self.assertFalse(ver(needle) in ver(haystack))
 
-
     def assert_canonical(self, canonical_list, version_list):
         self.assertEqual(ver(canonical_list), ver(version_list))
 
-
     def assert_overlaps(self, v1, v2):
         self.assertTrue(ver(v1).overlaps(ver(v2)))
 
-
     def assert_no_overlap(self, v1, v2):
         self.assertFalse(ver(v1).overlaps(ver(v2)))
 
-
     def assert_satisfies(self, v1, v2):
         self.assertTrue(ver(v1).satisfies(ver(v2)))
 
-
     def assert_does_not_satisfy(self, v1, v2):
         self.assertFalse(ver(v1).satisfies(ver(v2)))
 
-
     def check_intersection(self, expected, a, b):
         self.assertEqual(ver(expected), ver(a).intersection(ver(b)))
 
-
     def check_union(self, expected, a, b):
         self.assertEqual(ver(expected), ver(a).union(ver(b)))
 
-
     def test_two_segments(self):
         self.assert_ver_eq('1.0', '1.0')
         self.assert_ver_lt('1.0', '2.0')
         self.assert_ver_gt('2.0', '1.0')
 
-
     def test_three_segments(self):
         self.assert_ver_eq('2.0.1', '2.0.1')
         self.assert_ver_lt('2.0',   '2.0.1')
         self.assert_ver_gt('2.0.1', '2.0')
 
-
     def test_alpha(self):
         # TODO: not sure whether I like this.  2.0.1a is *usually*
         # TODO: less than 2.0.1, but special-casing it makes version
@@ -120,7 +106,6 @@ def test_alpha(self):
         self.assert_ver_gt('2.0.1a', '2.0.1')
         self.assert_ver_lt('2.0.1',  '2.0.1a')
 
-
     def test_patch(self):
         self.assert_ver_eq('5.5p1',  '5.5p1')
         self.assert_ver_lt('5.5p1',  '5.5p2')
@@ -129,7 +114,6 @@ def test_patch(self):
         self.assert_ver_lt('5.5p1',  '5.5p10')
         self.assert_ver_gt('5.5p10', '5.5p1')
 
-
     def test_num_alpha_with_no_separator(self):
         self.assert_ver_lt('10xyz',   '10.1xyz')
         self.assert_ver_gt('10.1xyz', '10xyz')
@@ -137,7 +121,6 @@ def test_num_alpha_with_no_separator(self):
         self.assert_ver_lt('xyz10',   'xyz10.1')
         self.assert_ver_gt('xyz10.1', 'xyz10')
 
-
     def test_alpha_with_dots(self):
         self.assert_ver_eq('xyz.4', 'xyz.4')
         self.assert_ver_lt('xyz.4', '8')
@@ -145,30 +128,25 @@ def test_alpha_with_dots(self):
         self.assert_ver_lt('xyz.4', '2')
         self.assert_ver_gt('2',     'xyz.4')
 
-
     def test_nums_and_patch(self):
         self.assert_ver_lt('5.5p2', '5.6p1')
         self.assert_ver_gt('5.6p1', '5.5p2')
         self.assert_ver_lt('5.6p1', '6.5p1')
         self.assert_ver_gt('6.5p1', '5.6p1')
 
-
     def test_rc_versions(self):
         self.assert_ver_gt('6.0.rc1', '6.0')
         self.assert_ver_lt('6.0',     '6.0.rc1')
 
-
     def test_alpha_beta(self):
         self.assert_ver_gt('10b2', '10a1')
         self.assert_ver_lt('10a2', '10b2')
 
-
     def test_double_alpha(self):
         self.assert_ver_eq('1.0aa', '1.0aa')
         self.assert_ver_lt('1.0a',  '1.0aa')
         self.assert_ver_gt('1.0aa', '1.0a')
 
-
     def test_padded_numbers(self):
         self.assert_ver_eq('10.0001', '10.0001')
         self.assert_ver_eq('10.0001', '10.1')
@@ -176,24 +154,20 @@ def test_padded_numbers(self):
         self.assert_ver_lt('10.0001', '10.0039')
         self.assert_ver_gt('10.0039', '10.0001')
 
-
     def test_close_numbers(self):
         self.assert_ver_lt('4.999.9', '5.0')
         self.assert_ver_gt('5.0',     '4.999.9')
 
-
     def test_date_stamps(self):
         self.assert_ver_eq('20101121', '20101121')
         self.assert_ver_lt('20101121', '20101122')
         self.assert_ver_gt('20101122', '20101121')
 
-
     def test_underscores(self):
         self.assert_ver_eq('2_0', '2_0')
         self.assert_ver_eq('2.0', '2_0')
         self.assert_ver_eq('2_0', '2.0')
 
-
     def test_rpm_oddities(self):
         self.assert_ver_eq('1b.fc17', '1b.fc17')
         self.assert_ver_lt('1b.fc17', '1.fc17')
@@ -202,7 +176,6 @@ def test_rpm_oddities(self):
         self.assert_ver_gt('1g.fc17', '1.fc17')
         self.assert_ver_lt('1.fc17',  '1g.fc17')
 
-
     # Stuff below here is not taken from RPM's tests and is
     # unique to spack
     def test_version_ranges(self):
@@ -214,7 +187,6 @@ def test_version_ranges(self):
         self.assert_ver_lt('1.2:1.4', '1.5:1.6')
         self.assert_ver_gt('1.5:1.6', '1.2:1.4')
 
-
     def test_contains(self):
         self.assert_in('1.3', '1.2:1.4')
         self.assert_in('1.2.5', '1.2:1.4')
@@ -233,7 +205,6 @@ def test_contains(self):
         self.assert_in('1.4.1', '1.2.7:1.4')
         self.assert_not_in('1.4.1', '1.2.7:1.4.0')
 
-
     def test_in_list(self):
         self.assert_in('1.2', ['1.5', '1.2', '1.3'])
         self.assert_in('1.2.5', ['1.5', '1.2:1.3'])
@@ -245,7 +216,6 @@ def test_in_list(self):
         self.assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
         self.assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
 
-
     def test_ranges_overlap(self):
         self.assert_overlaps('1.2', '1.2')
         self.assert_overlaps('1.2.1', '1.2.1')
@@ -262,7 +232,6 @@ def test_ranges_overlap(self):
         self.assert_overlaps(':', '1.6:1.9')
         self.assert_overlaps('1.6:1.9', ':')
 
-
     def test_overlap_with_containment(self):
         self.assert_in('1.6.5', '1.6')
         self.assert_in('1.6.5', ':1.6')
@@ -273,7 +242,6 @@ def test_overlap_with_containment(self):
         self.assert_not_in(':1.6', '1.6.5')
         self.assert_in('1.6.5', ':1.6')
 
-
     def test_lists_overlap(self):
         self.assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
         self.assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
@@ -287,7 +255,6 @@ def test_lists_overlap(self):
         self.assert_no_overlap('1,2,3,4,5', '6,7')
         self.assert_no_overlap('1,2,3,4,5', '6:7')
 
-
     def test_canonicalize_list(self):
         self.assert_canonical(['1.2', '1.3', '1.4'],
                               ['1.2', '1.3', '1.3', '1.4'])
@@ -316,7 +283,6 @@ def test_canonicalize_list(self):
         self.assert_canonical([':'],
                               [':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
 
-
     def test_intersection(self):
         self.check_intersection('2.5',
                                 '1.0:2.5', '2.5:3.0')
@@ -325,12 +291,11 @@ def test_intersection(self):
         self.check_intersection('0:1', ':', '0:1')
 
         self.check_intersection(['1.0', '2.5:2.7'],
-                                ['1.0:2.7'], ['2.5:3.0','1.0'])
+                                ['1.0:2.7'], ['2.5:3.0', '1.0'])
         self.check_intersection(['2.5:2.7'],
-                                ['1.1:2.7'], ['2.5:3.0','1.0'])
+                                ['1.1:2.7'], ['2.5:3.0', '1.0'])
         self.check_intersection(['0:1'], [':'], ['0:1'])
 
-
     def test_intersect_with_containment(self):
         self.check_intersection('1.6.5', '1.6.5', ':1.6')
         self.check_intersection('1.6.5', ':1.6', '1.6.5')
@@ -338,7 +303,6 @@ def test_intersect_with_containment(self):
         self.check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
         self.check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
 
-
     def test_union_with_containment(self):
         self.check_union(':1.6', '1.6.5', ':1.6')
         self.check_union(':1.6', ':1.6', '1.6.5')
@@ -346,8 +310,6 @@ def test_union_with_containment(self):
         self.check_union(':1.6', ':1.6.5', '1.6')
         self.check_union(':1.6', '1.6', ':1.6.5')
 
-
-    def test_union_with_containment(self):
         self.check_union(':', '1.0:', ':2.0')
 
         self.check_union('1:4', '1:3', '2:4')
@@ -356,7 +318,6 @@ def test_union_with_containment(self):
         # Tests successor/predecessor case.
         self.check_union('1:4', '1:2', '3:4')
 
-
     def test_basic_version_satisfaction(self):
         self.assert_satisfies('4.7.3',   '4.7.3')
 
@@ -372,7 +333,6 @@ def test_basic_version_satisfaction(self):
         self.assert_does_not_satisfy('4.8',   '4.9')
         self.assert_does_not_satisfy('4',     '4.9')
 
-
     def test_basic_version_satisfaction_in_lists(self):
         self.assert_satisfies(['4.7.3'],   ['4.7.3'])
 
@@ -388,7 +348,6 @@ def test_basic_version_satisfaction_in_lists(self):
         self.assert_does_not_satisfy(['4.8'],   ['4.9'])
         self.assert_does_not_satisfy(['4'],     ['4.9'])
 
-
     def test_version_range_satisfaction(self):
         self.assert_satisfies('4.7b6', '4.3:4.7')
         self.assert_satisfies('4.3.0', '4.3:4.7')
@@ -400,7 +359,6 @@ def test_version_range_satisfaction(self):
         self.assert_satisfies('4.7b6',        '4.3:4.7')
         self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
 
-
     def test_version_range_satisfaction_in_lists(self):
         self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
         self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
@@ -423,3 +381,11 @@ def test_satisfaction_with_lists(self):
 
         self.assert_satisfies('4.8.0', '4.2, 4.3:4.8')
         self.assert_satisfies('4.8.2', '4.2, 4.3:4.8')
+
+    def test_formatted_strings(self):
+        versions = '1.2.3', '1_2_3', '1-2-3'
+        for item in versions:
+            v = Version(item)
+            self.assertEqual(v.dotted, '1.2.3')
+            self.assertEqual(v.dashed, '1-2-3')
+            self.assertEqual(v.underscored, '1_2_3')
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 38b778fa008206344e2df7938fd65ba81deb7474..14b56e8d6c34280650b92ddc2a4933ac8388e538 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -165,6 +165,7 @@ def streamify(arg, mode):
                 raise ProcessError("Command exited with status %d:" %
                                    proc.returncode, cmd_line)
 
+
             if output is str or error is str:
                 result = ''
                 if output is str:
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 247f6d2362fd85266ca1f861c7912b43bbf1ff91..858d5814728d8a8e5e2e0e0ac2099fe48573d27b 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -43,16 +43,16 @@
   intersection
   concrete
 """
-import os
-import sys
 import re
 from bisect import bisect_left
 from functools import wraps
+
 from functools_backport import total_ordering
 
 # Valid version characters
 VALID_VERSION = r'[A-Za-z0-9_.-]'
 
+
 def int_if_int(string):
     """Convert a string to int if possible.  Otherwise, return a string."""
     try:
@@ -62,10 +62,11 @@ def int_if_int(string):
 
 
 def coerce_versions(a, b):
-    """Convert both a and b to the 'greatest' type between them, in this order:
+    """
+    Convert both a and b to the 'greatest' type between them, in this order:
            Version < VersionRange < VersionList
-       This is used to simplify comparison operations below so that we're always
-       comparing things that are of the same type.
+    This is used to simplify comparison operations below so that we're always
+    comparing things that are of the same type.
     """
     order = (Version, VersionRange, VersionList)
     ta, tb = type(a), type(b)
@@ -105,6 +106,7 @@ def coercing_method(a, b, *args, **kwargs):
 @total_ordering
 class Version(object):
     """Class to represent versions"""
+
     def __init__(self, string):
         string = str(string)
 
@@ -124,6 +126,17 @@ def __init__(self, string):
         # last element of separators is ''
         self.separators = tuple(re.split(segment_regex, string)[1:-1])
 
+    @property
+    def dotted(self):
+        return '.'.join(str(x) for x in self.version)
+
+    @property
+    def underscored(self):
+        return '_'.join(str(x) for x in self.version)
+
+    @property
+    def dashed(self):
+        return '-'.join(str(x) for x in self.version)
 
     def up_to(self, index):
         """Return a version string up to the specified component, exclusive.
@@ -131,15 +144,12 @@ def up_to(self, index):
         """
         return '.'.join(str(x) for x in self[:index])
 
-
     def lowest(self):
         return self
 
-
     def highest(self):
         return self
 
-
     @coerced
     def satisfies(self, other):
         """A Version 'satisfies' another if it is at least as specific and has a
@@ -147,11 +157,10 @@ def satisfies(self, other):
            gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
            a suitable compiler.
         """
-        nself  = len(self.version)
+        nself = len(self.version)
         nother = len(other.version)
         return nother <= nself and self.version[:nother] == other.version
 
-
     def wildcard(self):
         """Create a regex that will match variants of this version string."""
         def a_or_n(seg):
@@ -181,28 +190,22 @@ def a_or_n(seg):
         wc += '(?:[a-z]|alpha|beta)?)?' * (len(segments) - 1)
         return wc
 
-
     def __iter__(self):
         return iter(self.version)
 
-
     def __getitem__(self, idx):
         return tuple(self.version[idx])
 
-
     def __repr__(self):
         return self.string
 
-
     def __str__(self):
         return self.string
 
-
     @property
     def concrete(self):
         return self
 
-
     @coerced
     def __lt__(self, other):
         """Version comparison is designed for consistency with the way RPM
@@ -235,28 +238,23 @@ def __lt__(self, other):
         # If the common prefix is equal, the one with more segments is bigger.
         return len(self.version) < len(other.version)
 
-
     @coerced
     def __eq__(self, other):
         return (other is not None and
                 type(other) == Version and self.version == other.version)
 
-
     def __ne__(self, other):
         return not (self == other)
 
-
     def __hash__(self):
         return hash(self.version)
 
-
     @coerced
     def __contains__(self, other):
         if other is None:
             return False
         return other.version[:len(self.version)] == self.version
 
-
     def is_predecessor(self, other):
         """True if the other version is the immediate predecessor of this one.
            That is, NO versions v exist such that:
@@ -269,16 +267,13 @@ def is_predecessor(self, other):
         ol = other.version[-1]
         return type(sl) == int and type(ol) == int and (ol - sl == 1)
 
-
     def is_successor(self, other):
         return other.is_predecessor(self)
 
-
     @coerced
     def overlaps(self, other):
         return self in other or other in self
 
-
     @coerced
     def union(self, other):
         if self == other or other in self:
@@ -288,7 +283,6 @@ def union(self, other):
         else:
             return VersionList([self, other])
 
-
     @coerced
     def intersection(self, other):
         if self == other:
@@ -299,6 +293,7 @@ def intersection(self, other):
 
 @total_ordering
 class VersionRange(object):
+
     def __init__(self, start, end):
         if isinstance(start, basestring):
             start = Version(start)
@@ -310,15 +305,12 @@ def __init__(self, start, end):
         if start and end and end < start:
             raise ValueError("Invalid Version range: %s" % self)
 
-
     def lowest(self):
         return self.start
 
-
     def highest(self):
         return self.end
 
-
     @coerced
     def __lt__(self, other):
         """Sort VersionRanges lexicographically so that they are ordered first
@@ -331,28 +323,24 @@ def __lt__(self, other):
 
         s, o = self, other
         if s.start != o.start:
-            return s.start is None or (o.start is not None and s.start < o.start)
+            return s.start is None or (o.start is not None and s.start < o.start)  # NOQA: ignore=E501
 
         return (s.end != o.end and
                 o.end is None or (s.end is not None and s.end < o.end))
 
-
     @coerced
     def __eq__(self, other):
         return (other is not None and
                 type(other) == VersionRange and
                 self.start == other.start and self.end == other.end)
 
-
     def __ne__(self, other):
         return not (self == other)
 
-
     @property
     def concrete(self):
         return self.start if self.start == self.end else None
 
-
     @coerced
     def __contains__(self, other):
         if other is None:
@@ -373,57 +361,55 @@ def __contains__(self, other):
                         other.end in self.end)))
         return in_upper
 
-
     @coerced
     def satisfies(self, other):
-        """A VersionRange satisfies another if some version in this range
-           would satisfy some version in the other range.  To do this it must
-           either:
-             a) Overlap with the other range
-             b) The start of this range satisfies the end of the other range.
-
-           This is essentially the same as overlaps(), but overlaps assumes
-           that its arguments are specific.  That is, 4.7 is interpreted as
-           4.7.0.0.0.0... .  This funciton assumes that 4.7 woudl be satisfied
-           by 4.7.3.5, etc.
-
-           Rationale:
-           If a user asks for gcc@4.5:4.7, and a package is only compatible with
-           gcc@4.7.3:4.8, then that package should be able to build under the
-           constraints.  Just using overlaps() would not work here.
-
-           Note that we don't need to check whether the end of this range
-           would satisfy the start of the other range, because overlaps()
-           already covers that case.
-
-           Note further that overlaps() is a symmetric operation, while
-           satisfies() is not.
+        """
+        A VersionRange satisfies another if some version in this range
+        would satisfy some version in the other range.  To do this it must
+        either:
+          a) Overlap with the other range
+          b) The start of this range satisfies the end of the other range.
+
+        This is essentially the same as overlaps(), but overlaps assumes
+        that its arguments are specific.  That is, 4.7 is interpreted as
+        4.7.0.0.0.0... .  This function assumes that 4.7 would be satisfied
+        by 4.7.3.5, etc.
+
+        Rationale:
+        If a user asks for gcc@4.5:4.7, and a package is only compatible with
+        gcc@4.7.3:4.8, then that package should be able to build under the
+        constraints.  Just using overlaps() would not work here.
+
+        Note that we don't need to check whether the end of this range
+        would satisfy the start of the other range, because overlaps()
+        already covers that case.
+
+        Note further that overlaps() is a symmetric operation, while
+        satisfies() is not.
         """
         return (self.overlaps(other) or
                 # if either self.start or other.end are None, then this can't
                 # satisfy, or overlaps() would've taken care of it.
                 self.start and other.end and self.start.satisfies(other.end))
 
-
     @coerced
     def overlaps(self, other):
-        return ((self.start == None or other.end is None or
+        return ((self.start is None or other.end is None or
                  self.start <= other.end or
                  other.end in self.start or self.start in other.end) and
-                (other.start is None or self.end == None or
+                (other.start is None or self.end is None or
                  other.start <= self.end or
                  other.start in self.end or self.end in other.start))
 
-
     @coerced
     def union(self, other):
         if not self.overlaps(other):
             if (self.end is not None and other.start is not None and
-                self.end.is_predecessor(other.start)):
+                    self.end.is_predecessor(other.start)):
                 return VersionRange(self.start, other.end)
 
             if (other.end is not None and self.start is not None and
-                other.end.is_predecessor(self.start)):
+                    other.end.is_predecessor(self.start)):
                 return VersionRange(other.start, self.end)
 
             return VersionList([self, other])
@@ -442,13 +428,12 @@ def union(self, other):
         else:
             end = self.end
             # TODO: See note in intersection() about < and in discrepancy.
-            if not other.end in self.end:
+            if other.end not in self.end:
                 if end in other.end or other.end > self.end:
                     end = other.end
 
         return VersionRange(start, end)
 
-
     @coerced
     def intersection(self, other):
         if self.overlaps(other):
@@ -470,7 +455,7 @@ def intersection(self, other):
                 #     1.6 < 1.6.5  = True  (lexicographic)
                 # Should 1.6 NOT be less than 1.6.5?  Hm.
                 # Here we test (not end in other.end) first to avoid paradox.
-                if other.end is not None and not end in other.end:
+                if other.end is not None and end not in other.end:
                     if other.end < end or other.end in end:
                         end = other.end
 
@@ -479,15 +464,12 @@ def intersection(self, other):
         else:
             return VersionList()
 
-
     def __hash__(self):
         return hash((self.start, self.end))
 
-
     def __repr__(self):
         return self.__str__()
 
-
     def __str__(self):
         out = ""
         if self.start:
@@ -501,6 +483,7 @@ def __str__(self):
 @total_ordering
 class VersionList(object):
     """Sorted, non-redundant list of Versions and VersionRanges."""
+
     def __init__(self, vlist=None):
         self.versions = []
         if vlist is not None:
@@ -515,7 +498,6 @@ def __init__(self, vlist=None):
                 for v in vlist:
                     self.add(ver(v))
 
-
     def add(self, version):
         if type(version) in (Version, VersionRange):
             # This normalizes single-value version ranges.
@@ -524,9 +506,9 @@ def add(self, version):
 
             i = bisect_left(self, version)
 
-            while i-1 >= 0 and version.overlaps(self[i-1]):
-                version = version.union(self[i-1])
-                del self.versions[i-1]
+            while i - 1 >= 0 and version.overlaps(self[i - 1]):
+                version = version.union(self[i - 1])
+                del self.versions[i - 1]
                 i -= 1
 
             while i < len(self) and version.overlaps(self[i]):
@@ -542,7 +524,6 @@ def add(self, version):
         else:
             raise TypeError("Can't add %s to VersionList" % type(version))
 
-
     @property
     def concrete(self):
         if len(self) == 1:
@@ -550,11 +531,9 @@ def concrete(self):
         else:
             return None
 
-
     def copy(self):
         return VersionList(self)
 
-
     def lowest(self):
         """Get the lowest version in the list."""
         if not self:
@@ -562,7 +541,6 @@ def lowest(self):
         else:
             return self[0].lowest()
 
-
     def highest(self):
         """Get the highest version in the list."""
         if not self:
@@ -570,7 +548,6 @@ def highest(self):
         else:
             return self[-1].highest()
 
-
     @coerced
     def overlaps(self, other):
         if not other or not self:
@@ -586,14 +563,12 @@ def overlaps(self, other):
                 o += 1
         return False
 
-
     def to_dict(self):
         """Generate human-readable dict for YAML."""
         if self.concrete:
-            return { 'version'  : str(self[0]) }
+            return {'version': str(self[0])}
         else:
-            return { 'versions' : [str(v) for v in self] }
-
+            return {'versions': [str(v) for v in self]}
 
     @staticmethod
     def from_dict(dictionary):
@@ -605,7 +580,6 @@ def from_dict(dictionary):
         else:
             raise ValueError("Dict must have 'version' or 'versions' in it.")
 
-
     @coerced
     def satisfies(self, other, strict=False):
         """A VersionList satisfies another if some version in the list
@@ -633,20 +607,17 @@ def satisfies(self, other, strict=False):
                 o += 1
         return False
 
-
     @coerced
     def update(self, other):
         for v in other.versions:
             self.add(v)
 
-
     @coerced
     def union(self, other):
         result = self.copy()
         result.update(other)
         return result
 
-
     @coerced
     def intersection(self, other):
         # TODO: make this faster.  This is O(n^2).
@@ -656,7 +627,6 @@ def intersection(self, other):
                 result.add(s.intersection(o))
         return result
 
-
     @coerced
     def intersect(self, other):
         """Intersect this spec's list with other.
@@ -678,50 +648,40 @@ def __contains__(self, other):
             if i == 0:
                 if version not in self[0]:
                     return False
-            elif all(version not in v for v in self[i-1:]):
+            elif all(version not in v for v in self[i - 1:]):
                 return False
 
         return True
 
-
     def __getitem__(self, index):
         return self.versions[index]
 
-
     def __iter__(self):
         return iter(self.versions)
 
-
     def __reversed__(self):
         return reversed(self.versions)
 
-
     def __len__(self):
         return len(self.versions)
 
-
     @coerced
     def __eq__(self, other):
         return other is not None and self.versions == other.versions
 
-
     def __ne__(self, other):
         return not (self == other)
 
-
     @coerced
     def __lt__(self, other):
         return other is not None and self.versions < other.versions
 
-
     def __hash__(self):
         return hash(tuple(self.versions))
 
-
     def __str__(self):
         return ",".join(str(v) for v in self.versions)
 
-
     def __repr__(self):
         return str(self.versions)
 
@@ -730,7 +690,7 @@ def _string_to_version(string):
     """Converts a string to a Version, VersionList, or VersionRange.
        This is private.  Client code should use ver().
     """
-    string = string.replace(' ','')
+    string = string.replace(' ', '')
 
     if ',' in string:
         return VersionList(string.split(','))
@@ -738,7 +698,7 @@ def _string_to_version(string):
     elif ':' in string:
         s, e = string.split(':')
         start = Version(s) if s else None
-        end   = Version(e) if e else None
+        end = Version(e) if e else None
         return VersionRange(start, end)
 
     else:
diff --git a/lib/spack/spack/yaml_version_check.py b/lib/spack/spack/yaml_version_check.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2d084d6c3bbc925b20c28dbd8a021a36f73baf3
--- /dev/null
+++ b/lib/spack/spack/yaml_version_check.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Yaml Version Check is a module for ensuring that config file
+formats are compatible with the current version of Spack."""
+import os.path
+import os
+import llnl.util.tty as tty
+import spack.util.spack_yaml as syaml
+import spack.config
+
+
+def check_yaml_versions():
+    check_compiler_yaml_version()
+
+def check_compiler_yaml_version():
+    config_scopes = spack.config.config_scopes
+    for scope in config_scopes.values():
+        file_name = os.path.join(scope.path, 'compilers.yaml')
+        data = None
+        if os.path.isfile(file_name):
+            with open(file_name) as f:
+                data = syaml.load(f)
+
+        if data:
+            compilers = data['compilers']
+            if len(compilers) > 0:
+                if (not isinstance(compilers, list)) or 'operating_system' not in compilers[0]['compiler']:
+                    new_file = os.path.join(scope.path, '_old_compilers.yaml')
+                    tty.warn('%s is in an out-of-date compilers format. '
+                             'Moved to %s. Spack will automatically generate '
+                             'a compilers config file.'
+                             % (file_name, new_file))
+                    os.rename(file_name, new_file)
diff --git a/var/spack/mock_configs/site_spackconfig/compilers.yaml b/var/spack/mock_configs/site_spackconfig/compilers.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5f8b38007bb9f466b554e9e45e5c752129bfd414
--- /dev/null
+++ b/var/spack/mock_configs/site_spackconfig/compilers.yaml
@@ -0,0 +1,40 @@
+compilers:
+  all:
+    clang@3.3:
+      cc: /path/to/clang
+      cxx: /path/to/clang++
+      f77: None
+      fc: None
+      modules: None
+      strategy: PATH
+    gcc@4.5.0:
+      cc: /path/to/gcc
+      cxx: /path/to/g++
+      f77: /path/to/gfortran
+      fc: /path/to/gfortran
+      modules: None
+      strategy: PATH
+    gcc@5.2.0:
+      cc: cc
+      cxx: CC
+      f77: ftn
+      fc: ftn
+      modules:
+        - PrgEnv-gnu
+        - gcc/5.2.0
+      strategy: MODULES
+    intel@15.0.1:
+      cc: cc
+      ccx: CC
+      f77: ftn
+      fc: ftn
+      modules:
+      - PrgEnv-intel
+      - intel/15.0.1
+      strategy: MODULES
+    intel@15.1.2:
+      cc: /path/to/icc
+      cxx: /path/to/ic++
+      f77: /path/to/ifort
+      fc: /path/to/ifort
+      strategy: PATH
\ No newline at end of file
diff --git a/var/spack/packages/adios/package.py b/var/spack/packages/adios/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..260dcbe851df879e2716d857a53db7c1bc56a1e8
--- /dev/null
+++ b/var/spack/packages/adios/package.py
@@ -0,0 +1,38 @@
+import os
+
+from spack import *
+class Adios(Package):
+    """The Adaptable IO System (ADIOS) provides a simple, 
+        flexible way for scientists to describe the 
+        data in their code that may need to be written, 
+        read, or processed outside of the running simulation
+    """
+    
+    homepage = "http://www.olcf.ornl.gov/center-projects/adios/"
+    url      = "http://users.nccs.gov/~pnorbert/adios-1.9.0.tar.gz"
+
+    version('1.9.0', 'dbf5cb10e32add2f04c9b4052b7ffa76')
+
+    # Lots of setting up here for this package
+    # module swap PrgEnv-intel PrgEnv-$COMP
+    # module load cray-netcdf/4.3.3.1
+    # module load cray-hdf5/1.8.14
+    # module load python/2.7.10
+    depends_on('hdf5')
+    depends_on('mxml')
+
+    def install(self, spec, prefix):
+        configure_args = ["--prefix=%s" % prefix, 
+                          "--with-mxml=%s" % spec['mxml'].prefix, 
+                          "--with-hdf5=%s" % spec['hdf5'].prefix,
+                          "--with-netcdf=%s" % os.environ["NETCDF_DIR"],
+                          "--with-infiniband=no",
+                          "MPICC=cc","MPICXX=CC","MPIFC=ftn",
+                          "CPPFLAGS=-DMPICH_IGNORE_CXX_SEEK"] 
+
+        if spec.satisfies('%gcc'):
+            configure_args.extend(["CC=gcc", "CXX=g++", "FC=gfortran"])
+
+        configure(*configure_args)
+        make()
+        make("install")
diff --git a/var/spack/packages/mxml/package.py b/var/spack/packages/mxml/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..f79251d312f8cb39115ad22e5329be3a1812f78e
--- /dev/null
+++ b/var/spack/packages/mxml/package.py
@@ -0,0 +1,26 @@
+import os
+from spack import *
+
+class Mxml(Package):
+    """Mini-XML is a small XML library that you can use to read and write XML 
+       and XML-like data files in your application without requiring large 
+       non-standard libraries
+    """
+
+    homepage = "http://www.msweet.org"
+    url      = "http://www.msweet.org/files/project3/mxml-2.9.tar.gz"
+
+    version('2.9', 'e21cad0f7aacd18f942aa0568a8dee19')
+    version('2.8', 'd85ee6d30de053581242c4a86e79a5d2')
+    version('2.7', '76f2ae49bf0f5745d5cb5d9507774dc9')
+    version('2.6', '68977789ae64985dddbd1a1a1652642e')
+    version('2.5', 'f706377fba630b39fa02fd63642b17e5')
+
+    # module swap PrgEnv-intel PrgEnv-$COMP (Can use whatever compiler you want to use) 
+    # Case statement to change CC and CXX flags
+
+    def install(self, spec, prefix):
+        configure('--prefix=%s' % prefix, "--disable-shared", 'CFLAGS=-static')
+        make()
+        make("install")
+
diff --git a/var/spack/repos/builtin.mock/packages/externalmodule/package.py b/var/spack/repos/builtin.mock/packages/externalmodule/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7b0da3fd9185cfc82fb10b643d028173463a5e9
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/externalmodule/package.py
@@ -0,0 +1,37 @@
+
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Externalmodule(Package):
+    homepage = "http://somewhere.com"
+    url      = "http://somewhere.com/module-1.0.tar.gz"
+
+    version('1.0', '1234567890abcdef1234567890abcdef')
+
+    depends_on('externalprereq')
+
+    def install(self, spec, prefix):
+        pass
diff --git a/var/spack/repos/builtin.mock/packages/multimethod/package.py b/var/spack/repos/builtin.mock/packages/multimethod/package.py
index def73ad82e5ffe934d2ce500f9ead84db517dd39..649afa59454eb45b2db444e125759f0f366d30b4 100644
--- a/var/spack/repos/builtin.mock/packages/multimethod/package.py
+++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py
@@ -22,8 +22,11 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
+import imp
+from llnl.util.filesystem import join_path
+from spack.util.naming import mod_to_class
 from spack import *
-
+import spack.architecture
 
 class Multimethod(Package):
     """This package is designed for use with Spack's multimethod test.
@@ -101,25 +104,26 @@ def has_a_default(self):
 
 
     #
-    # Make sure we can switch methods on different architectures
+    # Make sure we can switch methods on different target
     #
-    @when('arch=x86_64')
-    def different_by_architecture(self):
-        return 'x86_64'
-
-    @when('arch=ppc64')
-    def different_by_architecture(self):
-        return 'ppc64'
-
-    @when('arch=ppc32')
-    def different_by_architecture(self):
-        return 'ppc32'
-
-    @when('arch=arm64')
-    def different_by_architecture(self):
-        return 'arm64'
-
-
+#    for platform_name in ['cray_xc', 'darwin', 'linux']:
+#        file_path = join_path(spack.platform_path, platform_name)
+#        platform_mod = imp.load_source('spack.platforms', file_path + '.py')
+#        cls = getattr(platform_mod, mod_to_class(platform_name))
+        
+#        platform = cls()
+    platform = spack.architecture.sys_type()
+    targets = platform.targets.values()
+    if len(targets) > 1:
+        targets = targets[:-1]
+    
+    for target in targets:
+        @when('target='+target.name)
+        def different_by_target(self):
+            if isinstance(self.spec.architecture.target,basestring):
+                return self.spec.architecture.target
+            else:
+                return self.spec.architecture.target.name
     #
     # Make sure we can switch methods on different dependencies
     #
diff --git a/var/spack/repos/builtin/packages/bertini/package.py b/var/spack/repos/builtin/packages/bertini/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d7da705e453222f64e84d9c8c71c68cde02b3e8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bertini/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bertini(Package):
+    """Bertini is a general-purpose solver, written in C, that was created
+    for research about polynomial continuation. It solves for the numerical
+    solution of systems of polynomial equations using homotopy continuation."""
+
+    homepage = "https://bertini.nd.edu/"
+    url      = "https://bertini.nd.edu/BertiniSource_v1.5.tar.gz"
+
+    version('1.5', 'e3f6cc6e7f9a0cf1d73185e8671af707')
+
+    variant('mpi', default=True, description='Compile in parallel')
+
+    depends_on('flex')
+    depends_on('bison')
+    depends_on('gmp')
+    depends_on('mpfr')
+    depends_on('mpi', when='+mpi')
+
+    def install(self, spec, prefix):
+        configure('--prefix=%s' % prefix)
+
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py
index 9e4cc98ae6b687413335c596031d6eac4e7cfe43..5f305abb028896e95e2a4fde79d1768006c64f67 100644
--- a/var/spack/repos/builtin/packages/binutils/package.py
+++ b/var/spack/repos/builtin/packages/binutils/package.py
@@ -30,8 +30,9 @@ class Binutils(Package):
 
     url="https://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2"
 
+    # 2.26 is incompatible with py-pillow build for some reason.
     version('2.26', '64146a0faa3b411ba774f47d41de239f')
-    version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66')
+    version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66', preferred=True)
     version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b')
     version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
     version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')
diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
index 2f2965eb12bb5a6828d7cc0ff7a32994f8dfcba5..cde76c590aac07afb1a20b45589ddd3525c3e1da 100644
--- a/var/spack/repos/builtin/packages/boost/package.py
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -27,7 +27,7 @@
 import sys
 
 import os
-import sys
+
 
 class Boost(Package):
     """Boost provides free peer-reviewed portable C++ source
@@ -75,23 +75,23 @@ class Boost(Package):
     version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0')
 
     default_install_libs = set(['atomic',
-        'chrono',
-        'date_time',
-        'filesystem',
-        'graph',
-        'iostreams',
-        'locale',
-        'log',
-        'math',
-        'program_options',
-        'random',
-        'regex',
-        'serialization',
-        'signals',
-        'system',
-        'test',
-        'thread',
-        'wave'])
+                                'chrono',
+                                'date_time',
+                                'filesystem',
+                                'graph',
+                                'iostreams',
+                                'locale',
+                                'log',
+                                'math',
+                                'program_options',
+                                'random',
+                                'regex',
+                                'serialization',
+                                'signals',
+                                'system',
+                                'test',
+                                'thread',
+                                'wave'])
 
     # mpi/python are not installed by default because they pull in many
     # dependencies and/or because there is a great deal of customization
@@ -109,6 +109,7 @@ class Boost(Package):
     variant('multithreaded', default=True, description="Build multi-threaded versions of libraries")
     variant('singlethreaded', default=True, description="Build single-threaded versions of libraries")
     variant('icu_support', default=False, description="Include ICU support (for regex/locale libraries)")
+    variant('graph', default=False, description="Build the Boost Graph library")
 
     depends_on('icu', when='+icu_support')
     depends_on('python', when='+python')
@@ -120,15 +121,18 @@ class Boost(Package):
     patch('boost_11856.patch', when='@1.60.0%gcc@4.4.7')
 
     def url_for_version(self, version):
-        """Handle Boost's weird URLs, which write the version two different ways."""
+        """
+        Handle Boost's weird URLs,
+        which write the version two different ways.
+        """
         parts = [str(p) for p in Version(version)]
         dots = ".".join(parts)
         underscores = "_".join(parts)
-        return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
-            dots, underscores)
+        return "http://downloads.sourceforge.net/project/boost" \
+               "/boost/%s/boost_%s.tar.bz2" % (dots, underscores)
 
     def determine_toolset(self, spec):
-        if spec.satisfies("arch=darwin-x86_64"):
+        if spec.satisfies("platform=darwin"):
             return 'darwin'
 
         toolsets = {'g++': 'gcc',
@@ -149,20 +153,20 @@ def determine_bootstrap_options(self, spec, withLibs, options):
 
         if '+python' in spec:
             options.append('--with-python=%s' %
-                join_path(spec['python'].prefix.bin, 'python'))
+                           join_path(spec['python'].prefix.bin, 'python'))
 
         with open('user-config.jam', 'w') as f:
             compiler_wrapper = join_path(spack.build_env_path, 'c++')
             f.write("using {0} : : {1} ;\n".format(boostToolsetId,
-                compiler_wrapper))
+                    compiler_wrapper))
 
             if '+mpi' in spec:
                 f.write('using mpi : %s ;\n' %
-                    join_path(spec['mpi'].prefix.bin, 'mpicxx'))
+                        join_path(spec['mpi'].prefix.bin, 'mpicxx'))
             if '+python' in spec:
                 f.write('using python : %s : %s ;\n' %
-                    (spec['python'].version,
-                    join_path(spec['python'].prefix.bin, 'python')))
+                        (spec['python'].version,
+                         join_path(spec['python'].prefix.bin, 'python')))
 
     def determine_b2_options(self, spec, options):
         if '+debug' in spec:
@@ -178,8 +182,7 @@ def determine_b2_options(self, spec, options):
                 '-s', 'BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include,
                 '-s', 'BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib,
                 '-s', 'ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include,
-                '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib,
-                ])
+                '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib])
 
         linkTypes = ['static']
         if '+shared' in spec:
@@ -191,7 +194,8 @@ def determine_b2_options(self, spec, options):
         if '+singlethreaded' in spec:
             threadingOpts.append('single')
         if not threadingOpts:
-            raise RuntimeError("At least one of {singlethreaded, multithreaded} must be enabled")
+            raise RuntimeError("""At least one of {singlethreaded,
+                               multithreaded} must be enabled""")
 
         options.extend([
             'toolset=%s' % self.determine_toolset(spec),
@@ -202,9 +206,9 @@ def determine_b2_options(self, spec, options):
 
     def install(self, spec, prefix):
         # On Darwin, Boost expects the Darwin libtool. However, one of the
-        # dependencies may have pulled in Spack's GNU libtool, and these two are
-        # not compatible. We thus create a symlink to Darwin's libtool and add
-        # it at the beginning of PATH.
+        # dependencies may have pulled in Spack's GNU libtool, and these two
+        # are not compatible. We thus create a symlink to Darwin's libtool
+        # and add it at the beginning of PATH.
         if sys.platform == 'darwin':
             newdir = os.path.abspath('darwin-libtool')
             mkdirp(newdir)
@@ -217,7 +221,8 @@ def install(self, spec, prefix):
                 withLibs.append(lib)
         if not withLibs:
             # if no libraries are specified for compilation, then you dont have
-            # to configure/build anything, just copy over to the prefix directory.
+            # to configure/build anything, just copy over to the prefix
+            # directory.
             src = join_path(self.stage.source_path, 'boost')
             mkdirp(join_path(prefix, 'include'))
             dst = join_path(prefix, 'include', 'boost')
@@ -235,6 +240,9 @@ def install(self, spec, prefix):
             withLibs.remove('chrono')
         if not spec.satisfies('@1.43.0:'):
             withLibs.remove('random')
+        if '+graph' in spec and '+mpi' in spec:
+            withLibs.remove('graph')
+            withLibs.append('graph_parallel')
 
         # to make Boost find the user-config.jam
         env['BOOST_BUILD_PATH'] = './'
@@ -259,6 +267,7 @@ def install(self, spec, prefix):
         for threadingOpt in threadingOpts:
             b2('install', 'threading=%s' % threadingOpt, *b2_options)
 
-        # The shared libraries are not installed correctly on Darwin; correct this
+        # The shared libraries are not installed correctly
+        # on Darwin; correct this
         if (sys.platform == 'darwin') and ('+shared' in spec):
             fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/c-blosc/package.py b/var/spack/repos/builtin/packages/c-blosc/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..dee332be14c375201b9c7b34b43dd615a9c397e9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/c-blosc/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import sys
+
+from spack import *
+
+class CBlosc(Package):
+    """Blosc, an extremely fast, multi-threaded, meta-compressor library"""
+    homepage = "http://www.blosc.org"
+    url      = "https://github.com/Blosc/c-blosc/archive/v1.9.2.tar.gz"
+
+    version('1.9.2', 'dd2d83069d74b36b8093f1c6b49defc5')
+    version('1.9.1', '7d708d3daadfacf984a87b71b1734ce2')
+    version('1.9.0', 'e4c1dc8e2c468e5cfa2bf05eeee5357a')
+    version('1.8.1', 'd73d5be01359cf271e9386c90dcf5b05')
+    version('1.8.0', '5b92ecb287695ba20cc33d30bf221c4f')
+
+    depends_on("cmake")
+    depends_on("snappy")
+    depends_on("zlib")
+
+    def install(self, spec, prefix):
+        cmake('.', *std_cmake_args)
+
+        make()
+        make("install")
+        if sys.platform == 'darwin':
+            fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py
index 4b8fe0d8afd4ef80c83b6b1de44d88c15a2e2737..a424c73859fc27e2f0f8456bfcbc5cf5059a8ce2 100644
--- a/var/spack/repos/builtin/packages/caliper/package.py
+++ b/var/spack/repos/builtin/packages/caliper/package.py
@@ -34,7 +34,7 @@ class Caliper(Package):
     homepage = "https://github.com/LLNL/Caliper"
     url      = ""
 
-    version('master', git='ssh://git@github.com:LLNL/Caliper.git')
+    version('master', git='https://github.com/LLNL/Caliper.git')
 
     variant('mpi', default=False, description='Enable MPI function wrappers.')
 
diff --git a/var/spack/repos/builtin/packages/daal/package.py b/var/spack/repos/builtin/packages/daal/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..e13dd80e19623aeb2c7cd1d81a58eca426b5dd44
--- /dev/null
+++ b/var/spack/repos/builtin/packages/daal/package.py
@@ -0,0 +1,28 @@
+from spack import *
+import os
+
+from spack.pkg.builtin.intel import IntelInstaller
+
+
+class Daal(IntelInstaller):
+    """Intel Data Analytics Acceleration Library.
+
+    Note: You will have to add the download file to a
+    mirror so that Spack can find it. For instructions on how to set up a
+    mirror, see http://software.llnl.gov/spack/mirrors.html"""
+
+    homepage = "https://software.intel.com/en-us/daal"
+
+    version('2016.2.181', 'aad2aa70e5599ebfe6f85b29d8719d46',
+            url="file://%s/l_daal_2016.2.181.tgz" % os.getcwd())
+    version('2016.3.210', 'ad747c0dd97dace4cad03cf2266cad28',
+            url="file://%s/l_daal_2016.3.210.tgz" % os.getcwd())
+
+    def install(self, spec, prefix):
+
+        self.intel_prefix = os.path.join(prefix, "pkg")
+        IntelInstaller.install(self, spec, prefix)
+
+        daal_dir = os.path.join(self.intel_prefix, "daal")
+        for f in os.listdir(daal_dir):
+            os.symlink(os.path.join(daal_dir, f), os.path.join(self.prefix, f))
diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py
index 49dc971d3a82275f52408b62d6bc8f54c8bc21b1..54b6426d3639aca8b2813521aed74fb1a7ab7374 100644
--- a/var/spack/repos/builtin/packages/dealii/package.py
+++ b/var/spack/repos/builtin/packages/dealii/package.py
@@ -25,18 +25,24 @@
 from spack import *
 import sys
 
+
 class Dealii(Package):
-    """C++ software library providing well-documented tools to build finite element codes for a broad variety of PDEs."""
+    """C++ software library providing well-documented tools to build finite
+    element codes for a broad variety of PDEs."""
     homepage = "https://www.dealii.org"
-    url      = "https://github.com/dealii/dealii/releases/download/v8.4.0/dealii-8.4.0.tar.gz"
+    url      = "https://github.com/dealii/dealii/releases/download/v8.4.1/dealii-8.4.1.tar.gz"
 
+    version('8.4.1', 'efbaf16f9ad59cfccad62302f36c3c1d')
     version('8.4.0', 'ac5dbf676096ff61e092ce98c80c2b00')
+    version('8.3.0', 'fc6cdcb16309ef4bea338a4f014de6fa')
+    version('8.2.1', '71c728dbec14f371297cd405776ccf08')
+    version('8.1.0', 'aa8fadc2ce5eb674f44f997461bf668d')
     version('dev', git='https://github.com/dealii/dealii.git')
 
     variant('mpi',      default=True,  description='Compile with MPI')
     variant('arpack',   default=True,  description='Compile with Arpack and PArpack (only with MPI)')
     variant('doc',      default=False, description='Compile with documentation')
-    variant('gsl' ,     default=True,  description='Compile with GSL')
+    variant('gsl',      default=True,  description='Compile with GSL')
     variant('hdf5',     default=True,  description='Compile with HDF5 (only with MPI)')
     variant('metis',    default=True,  description='Compile with Metis')
     variant('netcdf',   default=True,  description='Compile with Netcdf (only with MPI)')
@@ -47,38 +53,40 @@ class Dealii(Package):
     variant('trilinos', default=True,  description='Compile with Trilinos (only with MPI)')
 
     # required dependencies, light version
-    depends_on ("blas")
-    # Boost 1.58 is blacklisted, see https://github.com/dealii/dealii/issues/1591
-    # require at least 1.59
-    depends_on ("boost@1.59.0:",     when='~mpi')
-    depends_on ("boost@1.59.0:+mpi", when='+mpi')
-    depends_on ("bzip2")
-    depends_on ("cmake")
-    depends_on ("lapack")
-    depends_on ("muparser")
-    depends_on ("suite-sparse")
-    depends_on ("tbb")
-    depends_on ("zlib")
+    depends_on("blas")
+    # Boost 1.58 is blacklisted, see
+    # https://github.com/dealii/dealii/issues/1591
+    # Require at least 1.59
+    depends_on("boost@1.59.0:+thread+system+serialization+iostreams",     when='~mpi')  # NOQA: ignore=E501
+    depends_on("boost@1.59.0:+mpi+thread+system+serialization+iostreams", when='+mpi')  # NOQA: ignore=E501
+    depends_on("bzip2")
+    depends_on("cmake")
+    depends_on("lapack")
+    depends_on("muparser")
+    depends_on("suite-sparse")
+    depends_on("tbb")
+    depends_on("zlib")
 
     # optional dependencies
-    depends_on ("mpi", when="+mpi")
-    depends_on ("arpack-ng+mpi", when='+arpack+mpi')
-    depends_on ("doxygen", when='+doc')
-    depends_on ("gsl", when='@8.5.0:+gsl')
-    depends_on ("gsl", when='@dev+gsl')
-    depends_on ("hdf5+mpi~cxx", when='+hdf5+mpi') #FIXME NetCDF declares dependency with ~cxx, why?
-    depends_on ("metis@5:", when='+metis')
-    depends_on ("netcdf+mpi", when="+netcdf+mpi")
-    depends_on ("netcdf-cxx", when='+netcdf+mpi')
-    depends_on ("oce", when='+oce')
-    depends_on ("p4est", when='+p4est+mpi')
-    depends_on ("petsc+mpi", when='+petsc+mpi')
-    depends_on ("slepc", when='+slepc+petsc+mpi')
-    depends_on ("trilinos", when='+trilinos+mpi')
+    depends_on("mpi",              when="+mpi")
+    depends_on("arpack-ng+mpi",    when='+arpack+mpi')
+    depends_on("doxygen+graphviz", when='+doc')
+    depends_on("graphviz",         when='+doc')
+    depends_on("gsl",              when='@8.5.0:+gsl')
+    depends_on("gsl",              when='@dev+gsl')
+    depends_on("hdf5+mpi",         when='+hdf5+mpi')
+    depends_on("metis@5:",         when='+metis')
+    depends_on("netcdf+mpi",       when="+netcdf+mpi")
+    depends_on("netcdf-cxx",       when='+netcdf+mpi')
+    depends_on("oce",              when='+oce')
+    depends_on("p4est",            when='+p4est+mpi')
+    depends_on("petsc+mpi",        when='+petsc+mpi')
+    depends_on("slepc",            when='+slepc+petsc+mpi')
+    depends_on("trilinos",         when='+trilinos+mpi')
 
     # developer dependnecies
-    depends_on ("numdiff", when='@dev')
-    depends_on ("astyle@2.04", when='@dev')
+    depends_on("numdiff",     when='@dev')
+    depends_on("astyle@2.04", when='@dev')
 
     def install(self, spec, prefix):
         options = []
@@ -96,17 +104,17 @@ def install(self, spec, prefix):
             '-DDEAL_II_WITH_THREADS:BOOL=ON',
             '-DBOOST_DIR=%s' % spec['boost'].prefix,
             '-DBZIP2_DIR=%s' % spec['bzip2'].prefix,
-            # CMake's FindBlas/Lapack may pickup system's blas/lapack instead of Spack's.
-            # Be more specific to avoid this.
-            # Note that both lapack and blas are provided in -DLAPACK_XYZ variables
+            # CMake's FindBlas/Lapack may pickup system's blas/lapack instead
+            # of Spack's. Be more specific to avoid this.
+            # Note that both lapack and blas are provided in -DLAPACK_XYZ.
             '-DLAPACK_FOUND=true',
             '-DLAPACK_INCLUDE_DIRS=%s;%s' %
                 (spec['lapack'].prefix.include,
                  spec['blas'].prefix.include),
             '-DLAPACK_LIBRARIES=%s;%s' %
-                (join_path(spec['lapack'].prefix.lib,'liblapack.%s' % dsuf), # FIXME don't hardcode names
-                 join_path(spec['blas'].prefix.lib,'libblas.%s' % dsuf)),    # FIXME don't hardcode names
-            '-DMUPARSER_DIR=%s ' % spec['muparser'].prefix,
+                (spec['lapack'].lapack_shared_lib,
+                 spec['blas'].blas_shared_lib),
+            '-DMUPARSER_DIR=%s' % spec['muparser'].prefix,
             '-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix,
             '-DTBB_DIR=%s' % spec['tbb'].prefix,
             '-DZLIB_DIR=%s' % spec['zlib'].prefix
@@ -116,33 +124,34 @@ def install(self, spec, prefix):
         if '+mpi' in spec:
             options.extend([
                 '-DDEAL_II_WITH_MPI:BOOL=ON',
-                '-DCMAKE_C_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # FIXME: avoid hardcoding mpi wrappers names
-                '-DCMAKE_CXX_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
-                '-DCMAKE_Fortran_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+                '-DCMAKE_C_COMPILER=%s'       % spec['mpi'].mpicc,
+                '-DCMAKE_CXX_COMPILER=%s'     % spec['mpi'].mpicxx,
+                '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc,
             ])
         else:
             options.extend([
                 '-DDEAL_II_WITH_MPI:BOOL=OFF',
             ])
 
-        # Optional dependencies for which librariy names are the same as CMake variables
-        for library in ('gsl','hdf5','p4est','petsc','slepc','trilinos','metis'):
+        # Optional dependencies for which library names are the same as CMake
+        # variables:
+        for library in ('gsl', 'hdf5', 'p4est', 'petsc', 'slepc', 'trilinos', 'metis'):  # NOQA: ignore=E501
             if library in spec:
                 options.extend([
-                    '-D{library}_DIR={value}'.format(library=library.upper(), value=spec[library].prefix),
-                    '-DDEAL_II_WITH_{library}:BOOL=ON'.format(library=library.upper())
+                    '-D%s_DIR=%s' % (library.upper(), spec[library].prefix),
+                    '-DDEAL_II_WITH_%s:BOOL=ON' % library.upper()
                 ])
             else:
                 options.extend([
-                    '-DDEAL_II_WITH_{library}:BOOL=OFF'.format(library=library.upper())
+                    '-DDEAL_II_WITH_%s:BOOL=OFF' % library.upper()
                 ])
 
         # doxygen
         options.extend([
-            '-DDEAL_II_COMPONENT_DOCUMENTATION=%s' % ('ON' if '+doc' in spec else 'OFF'),
+            '-DDEAL_II_COMPONENT_DOCUMENTATION=%s' %
+            ('ON' if '+doc' in spec else 'OFF'),
         ])
 
-
         # arpack
         if '+arpack' in spec:
             options.extend([
@@ -160,11 +169,13 @@ def install(self, spec, prefix):
             options.extend([
                 '-DNETCDF_FOUND=true',
                 '-DNETCDF_LIBRARIES=%s;%s' %
-                    (join_path(spec['netcdf-cxx'].prefix.lib,'libnetcdf_c++.%s' % dsuf),
-                    join_path(spec['netcdf'].prefix.lib,'libnetcdf.%s' % dsuf)),
+                    (join_path(spec['netcdf-cxx'].prefix.lib,
+                               'libnetcdf_c++.%s' % dsuf),
+                     join_path(spec['netcdf'].prefix.lib,
+                               'libnetcdf.%s' % dsuf)),
                 '-DNETCDF_INCLUDE_DIRS=%s;%s' %
                     (spec['netcdf-cxx'].prefix.include,
-                    spec['netcdf'].prefix.include),
+                     spec['netcdf'].prefix.include),
             ])
         else:
             options.extend([
@@ -200,7 +211,7 @@ def install(self, spec, prefix):
         with working_dir('examples/step-3'):
             cmake('.')
             make('release')
-            make('run',parallel=False)
+            make('run', parallel=False)
 
         # An example which uses Metis + PETSc
         # FIXME: switch step-18 to MPI
@@ -213,7 +224,7 @@ def install(self, spec, prefix):
             if '^petsc' in spec and '^metis' in spec:
                 cmake('.')
                 make('release')
-                make('run',parallel=False)
+                make('run', parallel=False)
 
         # take step-40 which can use both PETSc and Trilinos
         # FIXME: switch step-40 to MPI run
@@ -222,43 +233,58 @@ def install(self, spec, prefix):
             print('========== Step-40 PETSc ============')
             print('=====================================')
             # list the number of cycles to speed up
-            filter_file(r'(const unsigned int n_cycles = 8;)',  ('const unsigned int n_cycles = 2;'), 'step-40.cc')
+            filter_file(r'(const unsigned int n_cycles = 8;)',
+                        ('const unsigned int n_cycles = 2;'), 'step-40.cc')
             cmake('.')
             if '^petsc' in spec:
                 make('release')
-                make('run',parallel=False)
+                make('run', parallel=False)
 
             print('=====================================')
             print('========= Step-40 Trilinos ==========')
             print('=====================================')
             # change Linear Algebra to Trilinos
-            filter_file(r'(\/\/ #define FORCE_USE_OF_TRILINOS.*)',  ('#define FORCE_USE_OF_TRILINOS'), 'step-40.cc')
+            # The below filter_file should be different for versions
+            # before and after 8.4.0
+            if spec.satisfies('@8.4.0:'):
+                filter_file(r'(\/\/ #define FORCE_USE_OF_TRILINOS.*)',
+                            ('#define FORCE_USE_OF_TRILINOS'), 'step-40.cc')
+            else:
+                filter_file(r'(#define USE_PETSC_LA.*)',
+                            ('// #define USE_PETSC_LA'), 'step-40.cc')
             if '^trilinos+hypre' in spec:
                 make('release')
-                make('run',parallel=False)
+                make('run', parallel=False)
 
-            print('=====================================')
-            print('=== Step-40 Trilinos SuperluDist ====')
-            print('=====================================')
-            # change to direct solvers
-            filter_file(r'(LA::SolverCG solver\(solver_control\);)',  ('TrilinosWrappers::SolverDirect::AdditionalData data(false,"Amesos_Superludist"); TrilinosWrappers::SolverDirect solver(solver_control,data);'), 'step-40.cc')
-            filter_file(r'(LA::MPI::PreconditionAMG preconditioner;)',  (''), 'step-40.cc')
-            filter_file(r'(LA::MPI::PreconditionAMG::AdditionalData data;)',  (''), 'step-40.cc')
-            filter_file(r'(preconditioner.initialize\(system_matrix, data\);)',  (''), 'step-40.cc')
-            filter_file(r'(solver\.solve \(system_matrix, completely_distributed_solution, system_rhs,)',  ('solver.solve (system_matrix, completely_distributed_solution, system_rhs);'), 'step-40.cc')
-            filter_file(r'(preconditioner\);)',  (''), 'step-40.cc')
-            if '^trilinos+superlu-dist' in spec:
-                make('release')
-                make('run',paralle=False)
+            # the rest of the tests on step 40 only works for
+            # dealii version 8.4.0 and after
+            if spec.satisfies('@8.4.0:'):
+                print('=====================================')
+                print('=== Step-40 Trilinos SuperluDist ====')
+                print('=====================================')
+                # change to direct solvers
+                filter_file(r'(LA::SolverCG solver\(solver_control\);)',  ('TrilinosWrappers::SolverDirect::AdditionalData data(false,"Amesos_Superludist"); TrilinosWrappers::SolverDirect solver(solver_control,data);'), 'step-40.cc')  # NOQA: ignore=E501
+                filter_file(r'(LA::MPI::PreconditionAMG preconditioner;)',
+                            (''), 'step-40.cc')
+                filter_file(r'(LA::MPI::PreconditionAMG::AdditionalData data;)',
+                            (''), 'step-40.cc')
+                filter_file(r'(preconditioner.initialize\(system_matrix, data\);)',
+                            (''), 'step-40.cc')
+                filter_file(r'(solver\.solve \(system_matrix, completely_distributed_solution, system_rhs,)',  ('solver.solve (system_matrix, completely_distributed_solution, system_rhs);'), 'step-40.cc')  # NOQA: ignore=E501
+                filter_file(r'(preconditioner\);)',  (''), 'step-40.cc')
+                if '^trilinos+superlu-dist' in spec:
+                    make('release')
+                    make('run', parallel=False)
 
-            print('=====================================')
-            print('====== Step-40 Trilinos MUMPS =======')
-            print('=====================================')
-            # switch to Mumps
-            filter_file(r'(Amesos_Superludist)',  ('Amesos_Mumps'), 'step-40.cc')
-            if '^trilinos+mumps' in spec:
-                make('release')
-                make('run',parallel=False)
+                print('=====================================')
+                print('====== Step-40 Trilinos MUMPS =======')
+                print('=====================================')
+                # switch to Mumps
+                filter_file(r'(Amesos_Superludist)',
+                            ('Amesos_Mumps'), 'step-40.cc')
+                if '^trilinos+mumps' in spec:
+                    make('release')
+                    make('run', parallel=False)
 
         print('=====================================')
         print('============ Step-36 ================')
@@ -267,7 +293,7 @@ def install(self, spec, prefix):
             if 'slepc' in spec:
                 cmake('.')
                 make('release')
-                make('run',parallel=False)
+                make('run', parallel=False)
 
         print('=====================================')
         print('============ Step-54 ================')
@@ -276,7 +302,7 @@ def install(self, spec, prefix):
             if 'oce' in spec:
                 cmake('.')
                 make('release')
-                make('run',parallel=False)
+                make('run', parallel=False)
 
     def setup_environment(self, spack_env, env):
         env.set('DEAL_II_DIR', self.prefix)
diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py
index ef6a3ccc7b03b0bdaf8fe874024755018079be83..447964f2869de2a98f3fbbbf0b1a30620f58ec12 100644
--- a/var/spack/repos/builtin/packages/espresso/package.py
+++ b/var/spack/repos/builtin/packages/espresso/package.py
@@ -26,20 +26,28 @@
 
 import os
 
+
 class Espresso(Package):
     """
-    QE is an integrated suite of Open-Source computer codes for electronic-structure calculations and materials
-    modeling at the nanoscale. It is based on density-functional theory, plane waves, and pseudopotentials.
+    QE is an integrated suite of Open-Source computer codes for
+    electronic-structure calculations and materials modeling at
+    the nanoscale. It is based on density-functional theory, plane
+    waves, and pseudopotentials.
     """
     homepage = 'http://quantum-espresso.org'
     url = 'http://www.qe-forge.org/gf/download/frsrelease/204/912/espresso-5.3.0.tar.gz'
 
+    version(
+        '5.4.0',
+        '8bb78181b39bd084ae5cb7a512c1cfe7',
+        url='http://www.qe-forge.org/gf/download/frsrelease/211/968/espresso-5.4.0.tar.gz'
+    )
     version('5.3.0', '6848fcfaeb118587d6be36bd10b7f2c3')
 
-    variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support')
+    variant('mpi', default=True, description='Builds with mpi support')
     variant('openmp', default=False, description='Enables openMP support')
     variant('scalapack', default=True, description='Enables scalapack support')
-    variant('elpa', default=True, description='Use elpa as an eigenvalue solver')
+    variant('elpa', default=True, description='Uses elpa as an eigenvalue solver')
 
     depends_on('blas')
     depends_on('lapack')
@@ -47,7 +55,12 @@ class Espresso(Package):
     depends_on('mpi', when='+mpi')
     depends_on('fftw~mpi', when='~mpi')
     depends_on('fftw+mpi', when='+mpi')
-    depends_on('scalapack', when='+scalapack+mpi')  # TODO : + mpi needed to avoid false dependencies installation
+    # TODO : + mpi needed to avoid false dependencies installation
+    depends_on('scalapack', when='+scalapack+mpi')
+
+    # Spurious problems running in parallel the Makefile
+    # generated by qe configure
+    parallel = False
 
     def check_variants(self, spec):
         error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active'
@@ -87,10 +100,9 @@ def install(self, spec, prefix):
         configure(*options)
         make('all')
 
-        if spec.architecture.startswith('darwin'):
+        if spec.satisfies('platform=darwin'):
             mkdirp(prefix.bin)
             for filename in glob("bin/*.x"):
                 install(filename, prefix.bin)
         else:
             make('install')
-
diff --git a/var/spack/repos/builtin/packages/fenics/package.py b/var/spack/repos/builtin/packages/fenics/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..08452376567e60b6ba85b96a37fe8f17d6f46285
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fenics/package.py
@@ -0,0 +1,176 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fenics(Package):
+    """FEniCS is organized as a collection of interoperable components
+    that together form the FEniCS Project. These components include
+    the problem-solving environment DOLFIN, the form compiler FFC, the
+    finite element tabulator FIAT, the just-in-time compiler Instant,
+    the code generation interface UFC, the form language UFL and a
+    range of additional components."""
+
+    homepage = "http://fenicsproject.org/"
+    url      = "https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-1.6.0.tar.gz"
+
+    base_url = "https://bitbucket.org/fenics-project/{pkg}/downloads/{pkg}-{version}.tar.gz"  # NOQA: ignore=E501
+
+    variant('hdf5',         default=True,  description='Compile with HDF5')
+    variant('parmetis',     default=True,  description='Compile with ParMETIS')
+    variant('scotch',       default=True,  description='Compile with Scotch')
+    variant('petsc',        default=True,  description='Compile with PETSc')
+    variant('slepc',        default=True,  description='Compile with SLEPc')
+    variant('trilinos',     default=True,  description='Compile with Trilinos')
+    variant('suite-sparse', default=True,  description='Compile with SuiteSparse solvers')
+    variant('vtk',          default=False, description='Compile with VTK')
+    variant('qt',           default=False, description='Compile with QT')
+    variant('mpi',          default=True,  description='Enables the distributed memory support')
+    variant('openmp',       default=True,  description='Enables the shared memory support')
+    variant('shared',       default=True,  description='Enables the build of shared libraries')
+    variant('debug',        default=False, description='Builds a debug version of the libraries')
+
+    # not part of spack list for now
+    # variant('petsc4py',     default=True,  description='Uses PETSc4py')
+    # variant('slepc4py',     default=True,  description='Uses SLEPc4py')
+    # variant('pastix',       default=True,  description='Compile with Pastix')
+
+    extends('python')
+
+    depends_on('py-numpy')
+    depends_on('py-ply')
+    depends_on('py-six')
+    depends_on('py-sphinx@1.0.1:', when='+doc')
+    depends_on('eigen@3.2.0:')
+    depends_on('boost')
+    depends_on('mpi', when='+mpi')
+    depends_on('hdf5', when='+hdf5')
+    depends_on('parmetis@4.0.2:^metis+real64', when='+parmetis')
+    depends_on('scotch~metis', when='+scotch~mpi')
+    depends_on('scotch+mpi~metis', when='+scotch+mpi')
+    depends_on('petsc@3.4:', when='+petsc')
+    depends_on('slepc@3.4:', when='+slepc')
+    depends_on('trilinos', when='+trilinos')
+    depends_on('vtk', when='+vtk')
+    depends_on('suite-sparse', when='+suite-sparse')
+    depends_on('qt', when='+qt')
+
+    # These are the build dependencies
+    depends_on('py-setuptools')
+    depends_on('cmake@2.8.12:')
+    depends_on('swig@3.0.3:')
+
+    releases = [
+        {
+            'version': '1.6.0',
+            'md5': '35cb4baf7ab4152a40fb7310b34d5800',
+            'resources': {
+                'ffc': '358faa3e9da62a1b1a717070217b793e',
+                'fiat': 'f4509d05c911fd93cea8d288a78a6c6f',
+                'instant': '5f2522eb032a5bebbad6597b6fe0732a',
+                'ufl': 'c40c5f04eaa847377ab2323122284016',
+            }
+        },
+        {
+            'version': '1.5.0',
+            'md5': '9b589a3534299a5e6d22c13c5eb30bb8',
+            'resources': {
+                'ffc': '343f6d30e7e77d329a400fd8e73e0b63',
+                'fiat': 'da3fa4dd8177bb251e7f68ec9c7cf6c5',
+                'instant': 'b744023ded27ee9df4a8d8c6698c0d58',
+                'ufl': '130d7829cf5a4bd5b52bf6d0955116fd',
+            }
+        },
+    ]
+
+    for release in releases:
+        version(release['version'], release['md5'], url=base_url.format(pkg='dolfin', version=release['version']))
+        for name, md5 in release['resources'].items():
+            resource(name=name,
+                     url=base_url.format(pkg=name, **release),
+                     md5=md5,
+                     destination='depends',
+                     when='@{version}'.format(**release),
+                     placement=name)
+
+    def cmake_is_on(self, option):
+        return 'ON' if option in self.spec else 'OFF'
+
+    def install(self, spec, prefix):
+        for package in ['ufl', 'ffc', 'fiat', 'instant']:
+            with working_dir(join_path('depends', package)):
+                python('setup.py', 'install', '--prefix=%s' % prefix)
+
+        cmake_args = [
+            '-DCMAKE_BUILD_TYPE:STRING={0}'.format(
+                'Debug' if '+debug' in spec else 'RelWithDebInfo'),
+            '-DBUILD_SHARED_LIBS:BOOL={0}'.format(
+                self.cmake_is_on('+shared')),
+            '-DDOLFIN_SKIP_BUILD_TESTS:BOOL=ON',
+            '-DDOLFIN_ENABLE_OPENMP:BOOL={0}'.format(
+                self.cmake_is_on('+openmp')),
+            '-DDOLFIN_ENABLE_CHOLMOD:BOOL={0}'.format(
+                self.cmake_is_on('suite-sparse')),
+            '-DDOLFIN_ENABLE_HDF5:BOOL={0}'.format(
+                self.cmake_is_on('hdf5')),
+            '-DDOLFIN_ENABLE_MPI:BOOL={0}'.format(
+                self.cmake_is_on('mpi')),
+            '-DDOLFIN_ENABLE_PARMETIS:BOOL={0}'.format(
+                self.cmake_is_on('parmetis')),
+            '-DDOLFIN_ENABLE_PASTIX:BOOL={0}'.format(
+                self.cmake_is_on('pastix')),
+            '-DDOLFIN_ENABLE_PETSC:BOOL={0}'.format(
+                self.cmake_is_on('petsc')),
+            '-DDOLFIN_ENABLE_PETSC4PY:BOOL={0}'.format(
+                self.cmake_is_on('py-petsc4py')),
+            '-DDOLFIN_ENABLE_PYTHON:BOOL={0}'.format(
+                self.cmake_is_on('python')),
+            '-DDOLFIN_ENABLE_QT:BOOL={0}'.format(
+                self.cmake_is_on('qt')),
+            '-DDOLFIN_ENABLE_SCOTCH:BOOL={0}'.format(
+                self.cmake_is_on('scotch')),
+            '-DDOLFIN_ENABLE_SLEPC:BOOL={0}'.format(
+                self.cmake_is_on('slepc')),
+            '-DDOLFIN_ENABLE_SLEPC4PY:BOOL={0}'.format(
+                self.cmake_is_on('py-slepc4py')),
+            '-DDOLFIN_ENABLE_SPHINX:BOOL={0}'.format(
+                self.cmake_is_on('py-sphinx')),
+            '-DDOLFIN_ENABLE_TRILINOS:BOOL={0}'.format(
+                self.cmake_is_on('trilinos')),
+            '-DDOLFIN_ENABLE_UMFPACK:BOOL={0}'.format(
+                self.cmake_is_on('suite-sparse')),
+            '-DDOLFIN_ENABLE_VTK:BOOL={0}'.format(
+                self.cmake_is_on('vtk')),
+            '-DDOLFIN_ENABLE_ZLIB:BOOL={0}'.format(
+                self.cmake_is_on('zlib')),
+        ]
+
+        cmake_args.extend(std_cmake_args)
+
+        with working_dir('build', create=True):
+            cmake('..', *cmake_args)
+
+            make()
+            make('install')
diff --git a/var/spack/repos/builtin/packages/hdf5-blosc/package.py b/var/spack/repos/builtin/packages/hdf5-blosc/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..50f380083cf5e3b8cb762780f208b5cd1dfa3cc2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hdf5-blosc/package.py
@@ -0,0 +1,206 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import os
+import shutil
+import sys
+
+from spack import *
+
+def _install_shlib(name, src, dst):
+    """Install a shared library from directory src to directory dst"""
+    if sys.platform == "darwin":
+        shlib0 = name + ".0.dylib"
+        shlib = name + ".dylib"
+        shutil.copyfile(join_path(src, shlib0), join_path(dst, shlib0))
+        os.symlink(shlib0, join_path(dst, shlib))
+    else:
+        shlib000 = name + ".so.0.0.0"
+        shlib0 = name + ".so.0"
+        shlib = name + ".dylib"
+        shutil.copyfile(join_path(src, shlib000), join_path(dst, shlib000))
+        os.symlink(shlib000, join_path(dst, shlib0))
+        os.symlink(shlib0, join_path(dst, shlib))
+
class Hdf5Blosc(Package):
    """Blosc filter for HDF5"""
    homepage = "https://github.com/Blosc/hdf5-blosc"
    url      = "https://github.com/Blosc/hdf5-blosc/archive/master.zip"

    version('master', '02c04acbf4bec66ec8a35bf157d1c9de')

    depends_on("c-blosc")
    depends_on("hdf5")
    depends_on("libtool")

    parallel = False

    def install(self, spec, prefix):
        """Build and install the Blosc filter and, for HDF5 >= 1.8.11,
        the dynamically loadable plugin.

        The build is driven directly via libtool because the cmake recipe
        shipped with hdf5-blosc does not work on Darwin.
        """
        libtool = Executable(join_path(spec["libtool"].prefix.bin, "libtool"))
        # Fix: honor the MPI compiler wrapper when HDF5 was built with MPI
        # (the variable was previously computed but never used; libtool was
        # always invoked with the plain "cc").
        if "+mpi" in spec["hdf5"]:
            cc = "mpicc"
        else:
            cc = "cc"
        mkdirp(prefix.include)
        mkdirp(prefix.lib)

        # Build and install filter
        with working_dir("src"):
            libtool("--mode=compile", "--tag=CC",
                    cc, "-g", "-O",
                    "-c", "blosc_filter.c")
            libtool("--mode=link", "--tag=CC",
                    cc, "-g", "-O",
                    "-rpath", prefix.lib,
                    "-o", "libblosc_filter.la",
                    "blosc_filter.lo",
                    "-L%s" % spec["c-blosc"].prefix.lib, "-lblosc",
                    "-L%s" % spec["hdf5"].prefix.lib, "-lhdf5")
            _install_shlib("libblosc_filter", ".libs", prefix.lib)

            # Build and install plugin
            # The plugin requires at least HDF5 1.8.11:
            if spec["hdf5"].satisfies("@1.8.11:"):
                libtool("--mode=compile", "--tag=CC",
                        cc, "-g", "-O",
                        "-c", "blosc_plugin.c")
                libtool("--mode=link", "--tag=CC",
                        cc, "-g", "-O",
                        "-rpath", prefix.lib,
                        "-o", "libblosc_plugin.la",
                        "blosc_plugin.lo",
                        "-L%s" % prefix.lib, "-lblosc_filter",
                        "-L%s" % spec["c-blosc"].prefix.lib, "-lblosc",
                        "-L%s" % spec["hdf5"].prefix.lib, "-lhdf5")
                _install_shlib("libblosc_plugin", ".libs", prefix.lib)

        self.check_install(spec)

    def check_install(self, spec):
        """Build and run a small program to test the installed HDF5 Blosc
        plugin."""
        print("Checking HDF5-Blosc plugin...")
        checkdir = "spack-check"
        with working_dir(checkdir, create=True):
            source = r"""\
#include <hdf5.h>
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

#define FILTER_BLOSC 32001   /* Blosc filter ID registered with the HDF group */

int main(int argc, char **argv) {
  herr_t herr;
  hid_t file = H5Fcreate("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
  assert(file >= 0);
  hsize_t dims[3] = {10, 10, 10};
  hid_t space = H5Screate_simple(3, dims, NULL);
  assert(space >= 0);
  hid_t create_proplist = H5Pcreate(H5P_DATASET_CREATE);
  assert(create_proplist >= 0);
  herr = H5Pset_chunk(create_proplist, 3, dims);
  assert(herr >= 0);
  herr = H5Pset_filter(create_proplist, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 0,
                       NULL);
  assert(herr >= 0);
  htri_t all_filters_avail = H5Pall_filters_avail(create_proplist);
  assert(all_filters_avail > 0);
  hid_t dataset = H5Dcreate(file, "dataset", H5T_NATIVE_DOUBLE, space,
                            H5P_DEFAULT, create_proplist, H5P_DEFAULT);
  assert(dataset >= 0);
  double data[10][10][10];
  for (int k=0; k<10; ++k) {
    for (int j=0; j<10; ++j) {
      for (int i=0; i<10; ++i) {
        data[k][j][i] = 1.0 / (1.0 + i + j + k);
      }
    }
  }
  herr = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, space, space, H5P_DEFAULT,
                  &data[0][0][0]);
  assert(herr >= 0);
  herr = H5Pclose(create_proplist);
  assert(herr >= 0);
  herr = H5Dclose(dataset);
  assert(herr >= 0);
  herr = H5Sclose(space);
  assert(herr >= 0);
  herr = H5Fclose(file);
  assert(herr >= 0);
  printf("Done.\n");
  return 0;
}
"""
            expected = """\
Done.
"""
            with open("check.c", "w") as f:
                f.write(source)
            if "+mpi" in spec["hdf5"]:
                cc = which("mpicc")
            else:
                cc = which("cc")
            # TODO: Automate these path and library settings
            cc("-c", "-I%s" % spec["hdf5"].prefix.include, "check.c")
            cc("-o", "check", "check.o",
               "-L%s" % spec["hdf5"].prefix.lib, "-lhdf5")
            try:
                check = Executable("./check")
                output = check(return_output=True)
            except Exception:
                # Best effort: a failed run leaves output empty and is
                # reported as a mismatch below.  (Was a bare except, which
                # also swallowed KeyboardInterrupt/SystemExit.)
                output = ""
            success = output == expected
            if not success:
                print("Produced output does not match expected output.")
                print("Expected output:")
                print("-" * 80)
                print(expected)
                print("-" * 80)
                print("Produced output:")
                print("-" * 80)
                print(output)
                print("-" * 80)
                print("Environment:")
                env = which("env")
                env()
                raise RuntimeError("HDF5 Blosc plugin check failed")
        shutil.rmtree(checkdir)

    def setup_environment(self, spack_env, run_env):
        """Make the plugin visible to HDF5 at build and run time."""
        spack_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
        run_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)

    def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
        """Propagate HDF5_PLUGIN_PATH to packages that depend on us."""
        spack_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
        run_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
index 21137ef356ba4af0bbc45fac09feb68992ad9f3d..e46f432be504389f33f7d646d1ed064df01f02a8 100644
--- a/var/spack/repos/builtin/packages/hdf5/package.py
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -38,6 +38,7 @@ class Hdf5(Package):
     list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
     list_depth = 3
 
+    version('1.10.0-patch1', '9180ff0ef8dc2ef3f61bd37a7404f295')
     version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199')
     version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618', preferred=True)
     version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
diff --git a/var/spack/repos/builtin/packages/hpl/package.py b/var/spack/repos/builtin/packages/hpl/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..efd5c8bb1dad3b3f372408dba1293242980e78f5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hpl/package.py
@@ -0,0 +1,119 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import platform
+
+
class Hpl(Package):
    """HPL is a software package that solves a (random) dense linear system
    in double precision (64 bits) arithmetic on distributed-memory computers.
    It can thus be regarded as a portable as well as freely available
    implementation of the High Performance Computing Linpack Benchmark."""

    homepage = "http://www.netlib.org/benchmark/hpl/"
    url      = "http://www.netlib.org/benchmark/hpl/hpl-2.2.tar.gz"

    version('2.2', '0eb19e787c3dc8f4058db22c9e0c5320')

    variant('openmp', default=False, description='Enable OpenMP support')

    depends_on('mpi@1.1:')
    depends_on('blas')

    # HPL's hand-written Makefiles are not safe for a parallel make.
    parallel = False

    def configure(self, spec, arch):
        """Write the Make.<arch> platform file consumed by HPL's Makefile.

        spec -- the concrete spec being installed.
        arch -- platform identifier string; names the generated file and
                the per-arch bin/lib/include build directories.
        """
        # List of configuration options
        # Order is important
        config = []

        # OpenMP support
        if '+openmp' in spec:
            config.append(
                'OMP_DEFS     = {0}'.format(self.compiler.openmp_flag)
            )

        config.extend([
            # Shell
            'SHELL        = /bin/sh',
            'CD           = cd',
            'CP           = cp',
            'LN_S         = ln -fs',
            'MKDIR        = mkdir -p',
            'RM           = /bin/rm -f',
            'TOUCH        = touch',
            # Platform identifier
            'ARCH         = {0}'.format(arch),
            # HPL Directory Structure / HPL library
            'TOPdir       = {0}'.format(os.getcwd()),
            'INCdir       = $(TOPdir)/include',
            'BINdir       = $(TOPdir)/bin/$(ARCH)',
            'LIBdir       = $(TOPdir)/lib/$(ARCH)',
            'HPLlib       = $(LIBdir)/libhpl.a',
            # Message Passing library (MPI)
            'MPinc        = -I{0}'.format(spec['mpi'].prefix.include),
            'MPlib        = -L{0}'.format(spec['mpi'].prefix.lib),
            # Linear Algebra library (BLAS or VSIPL)
            'LAinc        = {0}'.format(spec['blas'].prefix.include),
            'LAlib        = {0}'.format(spec['blas'].blas_shared_lib),
            # F77 / C interface
            'F2CDEFS      = -DAdd_ -DF77_INTEGER=int -DStringSunStyle',
            # HPL includes / libraries / specifics
            'HPL_INCLUDES = -I$(INCdir) -I$(INCdir)/$(ARCH) ' +
            '-I$(LAinc) -I$(MPinc)',
            'HPL_LIBS     = $(HPLlib) $(LAlib) $(MPlib)',
            'HPL_OPTS     = -DHPL_DETAILED_TIMING -DHPL_PROGRESS_REPORT',
            'HPL_DEFS     = $(F2CDEFS) $(HPL_OPTS) $(HPL_INCLUDES)',
            # Compilers / linkers - Optimization flags
            'CC           = {0}'.format(spec['mpi'].mpicc),
            'CCNOOPT      = $(HPL_DEFS)',
            'CCFLAGS      = $(HPL_DEFS) -O3',
            'LINKER       = $(CC)',
            'LINKFLAGS    = $(CCFLAGS) $(OMP_DEFS)',
            'ARCHIVER     = ar',
            'ARFLAGS      = r',
            'RANLIB       = echo'
        ])

        # Write configuration options to include file
        with open('Make.{0}'.format(arch), 'w') as makefile:
            for var in config:
                makefile.write('{0}\n'.format(var))

    def install(self, spec, prefix):
        """Build HPL against MPI/BLAS and copy the per-arch build trees
        into the installation prefix (HPL has no `make install`)."""
        # Arch used for file naming purposes only
        arch = '{0}-{1}'.format(platform.system(), platform.processor())

        # Generate Makefile include
        self.configure(spec, arch)

        make('arch={0}'.format(arch))

        # Manual installation
        install_tree(join_path('bin', arch), prefix.bin)
        install_tree(join_path('lib', arch), prefix.lib)
        install_tree(join_path('include', arch), prefix.include)
        install_tree('man', prefix.man)
diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..493ca16417c929e503dfa938b02f46aa9b51dab1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
@@ -0,0 +1,144 @@
+from spack import *
+import os
+import re
+
+from spack.pkg.builtin.intel import IntelInstaller, filter_pick, \
+    get_all_components
+
+
class IntelParallelStudio(IntelInstaller):
    """Intel Parallel Studio.

    Note: You will have to add the download file to a
    mirror so that Spack can find it. For instructions on how to set up a
    mirror, see http://software.llnl.gov/spack/mirrors.html"""

    homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"

    # TODO: can also try the online installer (will download files on demand)
    version('composer.2016.2', '1133fb831312eb519f7da897fec223fa',
        url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz"  # NOQA: ignore=E501
        % os.getcwd())
    version('professional.2016.2', '70be832f2d34c9bf596a5e99d5f2d832',
        url="file://%s/parallel_studio_xe_2016_update2.tgz" % os.getcwd())  # NOQA: ignore=E501
    version('cluster.2016.2', '70be832f2d34c9bf596a5e99d5f2d832',
        url="file://%s/parallel_studio_xe_2016_update2.tgz" % os.getcwd())  # NOQA: ignore=E501
    version('composer.2016.3', '3208eeabee951fc27579177b593cefe9',
        url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz"  # NOQA: ignore=E501
        % os.getcwd())
    version('professional.2016.3', 'eda19bb0d0d19709197ede58f13443f3',
        url="file://%s/parallel_studio_xe_2016_update3.tgz" % os.getcwd())  # NOQA: ignore=E501
    version('cluster.2016.3', 'eda19bb0d0d19709197ede58f13443f3',
        url="file://%s/parallel_studio_xe_2016_update3.tgz" % os.getcwd())  # NOQA: ignore=E501

    variant('rpath', default=True, description="Add rpath to .cfg files")
    variant('newdtags', default=False,
            description="Allow use of --enable-new-dtags in MPI wrappers")
    variant('all', default=False,
            description="Install all files with the requested edition")
    variant('mpi', default=True,
            description="Install the Intel MPI library and ITAC tool")
    variant('mkl', default=True, description="Install the Intel MKL library")
    variant('daal',
            default=True, description="Install the Intel DAAL libraries")
    variant('ipp', default=True, description="Install the Intel IPP libraries")
    variant('tools', default=True, description="""Install the Intel Advisor,\
VTune Amplifier, and Inspector tools""")

    provides('mpi', when='@cluster:+mpi')
    provides('mkl', when='+mkl')
    provides('daal', when='+daal')
    provides('ipp', when='+ipp')

    def install(self, spec, prefix):
        """Select the requested component set, run the Intel installer, and
        set up license symlinks, MPI wrapper fixes, and rpath .cfg files."""
        base_components = "ALL"  # when in doubt, install everything
        mpi_components = ""
        mkl_components = ""
        daal_components = ""
        ipp_components = ""
        # Initialize so the variable always exists (it was previously only
        # bound inside the else branch below).
        tool_components = ""

        if not spec.satisfies('+all'):
            all_components = get_all_components()
            regex = '(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)'
            base_components = \
                filter_pick(all_components, re.compile(regex).search)
            regex = '(icsxe|imb|mpi|itac|intel-tc|clck)'
            mpi_components = \
                filter_pick(all_components, re.compile(regex).search)
            mkl_components = \
                filter_pick(all_components, re.compile('(mkl)').search)
            daal_components = \
                filter_pick(all_components, re.compile('(daal)').search)
            ipp_components = \
                filter_pick(all_components, re.compile('(ipp)').search)
            regex = '(gdb|vtune|inspector|advisor)'
            tool_components = \
                filter_pick(all_components, re.compile(regex).search)

        components = base_components
        if not spec.satisfies('+all'):
            if spec.satisfies('+mpi') and 'cluster' in str(spec.version):
                components += mpi_components
            if spec.satisfies('+mkl'):
                components += mkl_components
            if spec.satisfies('+daal'):
                components += daal_components
            if spec.satisfies('+ipp'):
                components += ipp_components
            if spec.satisfies('+tools') and (spec.satisfies('@cluster') or
               spec.satisfies('@professional')):
                components += tool_components

        # Bug fix: with '+all', components is the literal string "ALL";
        # ';'.join("ALL") would produce "A;L;L", so pass the string through.
        if isinstance(components, str):
            self.intel_components = components
        else:
            self.intel_components = ';'.join(components)
        IntelInstaller.install(self, spec, prefix)

        absbindir = os.path.dirname(os.path.realpath(os.path.join(
            self.prefix.bin, "icc")))
        abslibdir = os.path.dirname(os.path.realpath(os.path.join(
            self.prefix.lib, "intel64", "libimf.a")))

        os.symlink(self.global_license_file, os.path.join(absbindir,
                                                          "license.lic"))
        if spec.satisfies('+tools') and (spec.satisfies('@cluster') or
                                         spec.satisfies('@professional')):
            os.mkdir(os.path.join(self.prefix, "inspector_xe/licenses"))
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, "inspector_xe/licenses", "license.lic"))
            os.mkdir(os.path.join(self.prefix, "advisor_xe/licenses"))
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, "advisor_xe/licenses", "license.lic"))
            os.mkdir(os.path.join(self.prefix, "vtune_amplifier_xe/licenses"))
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, "vtune_amplifier_xe/licenses", "license.lic"))

        if (spec.satisfies('+all') or spec.satisfies('+mpi')) and \
                spec.satisfies('@cluster'):
            os.symlink(self.global_license_file, os.path.join(
                self.prefix, "itac_latest", "license.lic"))
            if spec.satisfies('~newdtags'):
                # Strip --enable-new-dtags from the MPI compiler wrappers so
                # RPATH (not RUNPATH) entries are emitted.  (The original
                # list contained each wrapper name twice; membership tests
                # only need each name once.)
                wrappers = ["mpif77", "mpif90", "mpigcc", "mpigxx",
                            "mpiicc", "mpiicpc", "mpiifort"]
                wrapper_paths = []
                for root, dirs, files in os.walk(spec.prefix):
                    for name in files:
                        if name in wrappers:
                            # root is already an absolute path below prefix
                            wrapper_paths.append(os.path.join(root, name))
                for wrapper in wrapper_paths:
                    filter_file(r'-Xlinker --enable-new-dtags', r' ',
                                wrapper)

        if spec.satisfies('+rpath'):
            for compiler_command in ["icc", "icpc", "ifort"]:
                cfgfilename = os.path.join(absbindir, "%s.cfg" %
                                           compiler_command)
                with open(cfgfilename, "w") as f:
                    f.write('-Xlinker -rpath -Xlinker %s\n' % abslibdir)

        os.symlink(os.path.join(self.prefix.man, "common", "man1"),
                   os.path.join(self.prefix.man, "man1"))
diff --git a/var/spack/repos/builtin/packages/intel/package.py b/var/spack/repos/builtin/packages/intel/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec3192380ae90310780defe0c7366acb9852f8ab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/intel/package.py
@@ -0,0 +1,125 @@
+from spack import *
+import os
+import re
+
+
def filter_pick(input_list, regex_filter):
    """Returns the items in input_list that are found in the regex_filter"""
    return [item for item in input_list if regex_filter(item)]
+
+
def unfilter_pick(input_list, regex_filter):
    """Returns the items in input_list that are not found in the
       regex_filter"""
    return [item for item in input_list if not regex_filter(item)]
+
+
def get_all_components():
    """Returns a list of all the components associated with the downloaded
       Intel package"""
    components = []
    with open("pset/mediaconfig.xml", "r") as f:
        # Each component is advertised on its own line as <Abbr>name</Abbr>.
        for line in f:
            if '<Abbr>' in line:
                start = line.find('<Abbr>') + len('<Abbr>')
                components.append(line[start:line.find('</Abbr>')])
    return components
+
+
class IntelInstaller(Package):
    """Base package containing common methods for installing Intel software"""

    homepage = "https://software.intel.com/en-us"
    # Component list handed to the silent installer; subclasses override.
    intel_components = "ALL"
    # Spack licensed-software hooks: where licenses live and how they are
    # advertised to the installer.
    license_required = True
    license_comment = '#'
    license_files = ['Licenses/license.lic']
    license_vars = ['INTEL_LICENSE_FILE']
    license_url = \
        'https://software.intel.com/en-us/articles/intel-license-manager-faq'

    @property
    def global_license_file(self):
        """Returns the path where a global license file should be stored."""
        if not self.license_files:
            return
        return join_path(self.global_license_dir, "intel",
                         os.path.basename(self.license_files[0]))

    def install(self, spec, prefix):
        """Run Intel's installer non-interactively via a generated
        silent.cfg answer file."""
        # Remove the installation DB, otherwise it will try to install into
        # location of other Intel builds
        if os.path.exists(os.path.join(os.environ["HOME"], "intel",
                          "intel_sdp_products.db")):
            os.remove(os.path.join(os.environ["HOME"], "intel",
                      "intel_sdp_products.db"))

        # Subclasses may pre-set intel_prefix to relocate the install tree
        # (e.g. the ipp package installs under <prefix>/pkg).
        if not hasattr(self, "intel_prefix"):
            self.intel_prefix = self.prefix

        silent_config_filename = 'silent.cfg'
        with open(silent_config_filename, 'w') as f:
            f.write("""
ACCEPT_EULA=accept
PSET_MODE=install
CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes
PSET_INSTALL_DIR=%s
ACTIVATION_LICENSE_FILE=%s
ACTIVATION_TYPE=license_file
PHONEHOME_SEND_USAGE_DATA=no
CONTINUE_WITH_OPTIONAL_ERROR=yes
COMPONENTS=%s
""" % (self.intel_prefix, self.global_license_file, self.intel_components))

        install_script = which("install.sh")
        install_script('--silent', silent_config_filename)
+
+
class Intel(IntelInstaller):
    """Intel Compilers.

    Note: You will have to add the download file to a
    mirror so that Spack can find it. For instructions on how to set up a
    mirror, see http://software.llnl.gov/spack/mirrors.html"""

    homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"

    # TODO: can also try the online installer (will download files on demand)
    version('16.0.2', '1133fb831312eb519f7da897fec223fa',
        url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz"  # NOQA: ignore=E501
        % os.getcwd())
    version('16.0.3', '3208eeabee951fc27579177b593cefe9',
        url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz"  # NOQA: ignore=E501
        % os.getcwd())

    variant('rpath', default=True, description="Add rpath to .cfg files")

    def install(self, spec, prefix):
        """Install only the compiler components of Parallel Studio and wire
        up the license symlink, rpath .cfg files, and man pages."""
        # Restrict the install to compiler-related components.
        regex = '(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)'
        selected = filter_pick(get_all_components(),
                               re.compile(regex).search)
        self.intel_components = ';'.join(selected)
        IntelInstaller.install(self, spec, prefix)

        # Resolve the real bin/ and lib/ directories behind the prefix.
        absbindir = os.path.dirname(os.path.realpath(
            os.path.join(self.prefix.bin, "icc")))
        abslibdir = os.path.dirname(os.path.realpath(
            os.path.join(self.prefix.lib, "intel64", "libimf.a")))

        # symlink or copy?
        os.symlink(self.global_license_file,
                   os.path.join(absbindir, "license.lic"))

        if spec.satisfies('+rpath'):
            for compiler_command in ("icc", "icpc", "ifort"):
                cfgfilename = os.path.join(
                    absbindir, "{0}.cfg".format(compiler_command))
                with open(cfgfilename, "w") as f:
                    f.write('-Xlinker -rpath -Xlinker %s\n' % abslibdir)

        os.symlink(os.path.join(self.prefix.man, "common", "man1"),
                   os.path.join(self.prefix.man, "man1"))
diff --git a/var/spack/repos/builtin/packages/ipp/package.py b/var/spack/repos/builtin/packages/ipp/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..2bd931d5bd8ba67703012e74febfbc206ab19b2d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ipp/package.py
@@ -0,0 +1,26 @@
+from spack import *
+import os
+
+from spack.pkg.builtin.intel import IntelInstaller
+
+
class Ipp(IntelInstaller):
    """Intel Integrated Performance Primitives.

    Note: You will have to add the download file to a
    mirror so that Spack can find it. For instructions on how to set up a
    mirror, see http://software.llnl.gov/spack/mirrors.html"""

    homepage = "https://software.intel.com/en-us/intel-ipp"

    version('9.0.3.210', '0e1520dd3de7f811a6ef6ebc7aa429a3',
            url="file://%s/l_ipp_9.0.3.210.tgz" % os.getcwd())

    def install(self, spec, prefix):
        """Install IPP under <prefix>/pkg, then expose the ipp subtree at
        the top of the prefix via symlinks."""
        self.intel_prefix = os.path.join(prefix, "pkg")
        IntelInstaller.install(self, spec, prefix)

        ipp_dir = os.path.join(self.intel_prefix, "ipp")
        for entry in os.listdir(ipp_dir):
            os.symlink(os.path.join(ipp_dir, entry),
                       os.path.join(self.prefix, entry))
diff --git a/var/spack/repos/builtin/packages/libdwarf/package.py b/var/spack/repos/builtin/packages/libdwarf/package.py
index 3f5a72116e9cdb7bf039fd0596c60f029e9bc35a..594271f655fcd826a5c605b2101ec07eb39dd496 100644
--- a/var/spack/repos/builtin/packages/libdwarf/package.py
+++ b/var/spack/repos/builtin/packages/libdwarf/package.py
@@ -23,11 +23,11 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-import os
 
 # Only build certain parts of dwarf because the other ones break.
 dwarf_dirs = ['libdwarf', 'dwarfdump2']
 
+
 class Libdwarf(Package):
     """The DWARF Debugging Information Format is of interest to
        programmers working on compilers and debuggers (and any one
@@ -41,18 +41,17 @@ class Libdwarf(Package):
        MIPS/IRIX C compiler."""
 
     homepage = "http://www.prevanders.net/dwarf.html"
-    url      = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
+    url      = "http://www.prevanders.net/libdwarf-20160507.tar.gz"
     list_url = homepage
 
+    version('20160507', 'ae32d6f9ece5daf05e2d4b14822ea811')
     version('20130729', '4cc5e48693f7b93b7aa0261e63c0e21d')
     version('20130207', '64b42692e947d5180e162e46c689dfbf')
     version('20130126', 'ded74a5e90edb5a12aac3c29d260c5db')
-
     depends_on("libelf")
 
     parallel = False
 
-
     def install(self, spec, prefix):
         # dwarf build does not set arguments for ar properly
         make.add_default_arg('ARFLAGS=rcs')
@@ -69,7 +68,11 @@ def install(self, spec, prefix):
             install('libdwarf.h',  prefix.include)
             install('dwarf.h',     prefix.include)
 
-        with working_dir('dwarfdump2'):
+        if spec.satisfies('@20130126:20130729'):
+            dwarfdump_dir = 'dwarfdump2'
+        else:
+            dwarfdump_dir = 'dwarfdump'
+        with working_dir(dwarfdump_dir):
             configure("--prefix=" + prefix)
 
             # This makefile has strings of copy commands that
diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py
index 42e8711a7df98b71c510a99287c81c711e1f8854..91cef95cec2d5548e6a5d3d6f4366233fd40b300 100644
--- a/var/spack/repos/builtin/packages/libpciaccess/package.py
+++ b/var/spack/repos/builtin/packages/libpciaccess/package.py
@@ -37,7 +37,7 @@ class Libpciaccess(Package):
 
     def install(self, spec, prefix):
         # libpciaccess does not support OS X
-        if spec.satisfies('arch=darwin-x86_64'):
+        if spec.satisfies('platform=darwin'):
             # create a dummy directory
             mkdir(prefix.lib)
             return
diff --git a/var/spack/repos/builtin/packages/libpthread-stubs/package.py b/var/spack/repos/builtin/packages/libpthread-stubs/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..4bcca43c246b25948ab77b895d7eda83fdbb2ddd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libpthread-stubs/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
class LibpthreadStubs(Package):
    """The libpthread-stubs package provides weak aliases for pthread
       functions not provided in libc or otherwise available by
       default. """
    homepage = "http://xcb.freedesktop.org/"
    url      = "http://xcb.freedesktop.org/dist/libpthread-stubs-0.1.tar.bz2"

    version('0.3', 'e8fa31b42e13f87e8f5a7a2b731db7ee')

    def install(self, spec, prefix):
        """Standard autotools build: configure, build, install."""
        configure('--prefix={0}'.format(prefix))
        make()
        make('install')
diff --git a/var/spack/repos/builtin/packages/libxau/package.py b/var/spack/repos/builtin/packages/libxau/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..55816ecdbd446907cf15d6dffb9cb82d3396e351
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxau/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Libxau(Package):
+    """The libXau package contains a library implementing the X11
+       Authorization Protocol. This is useful for restricting client
+       access to the display."""
+    homepage = "http://xcb.freedesktop.org/"
+    url      = "http://ftp.x.org/pub/individual/lib/libXau-1.0.8.tar.bz2"
+
+    version('1.0.8', '685f8abbffa6d145c0f930f00703b21b')
+
+    depends_on('xproto')
+
+    def install(self, spec, prefix):
+        # Standard autotools configure; no package-specific options needed
+        configure('--prefix=%s' % prefix)
+
+        # Build and install the library
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py
index 0f39bb0f1d01f6fffb3d7207ea1568f53a62b217..746d4567e200fbb9fbc9a92a3933b1b13cd7edd9 100644
--- a/var/spack/repos/builtin/packages/libxcb/package.py
+++ b/var/spack/repos/builtin/packages/libxcb/package.py
@@ -35,18 +35,19 @@ class Libxcb(Package):
 
     version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb')
     version('1.11.1', '118623c15a96b08622603a71d8789bf3')
+
     depends_on("python")
     depends_on("xcb-proto")
     depends_on("pkg-config")
-
-    # depends_on('pthread')    # Ubuntu: apt-get install libpthread-stubs0-dev
-    # depends_on('xau')        # Ubuntu: apt-get install libxau-dev
+    depends_on("libpthread-stubs")
+    depends_on('libxau')
 
     def patch(self):
         filter_file('typedef struct xcb_auth_info_t {', 'typedef struct {', 'src/xcb.h')
 
 
     def install(self, spec, prefix):
+        env['PKG_CONFIG_PATH'] = env['PKG_CONFIG_PATH'] + ':/usr/lib64/pkgconfig'
         configure("--prefix=%s" % prefix)
 
         make()
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index c090c131c6ad3ff5649bbd5f9b34979f48da8bbb..7a6ea7401cee6f94ad920b57b013478b5425fbac 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-import os, shutil
+import os, glob
 
 
 class Llvm(Package):
@@ -46,7 +46,9 @@ class Llvm(Package):
     variant('libcxx', default=True, description="Build the LLVM C++ standard library")
     variant('compiler-rt', default=True, description="Build the LLVM compiler runtime, including sanitizers")
     variant('gold', default=True, description="Add support for LTO with the gold linker plugin")
-
+    variant('shared_libs', default=False, description="Build all components as shared libraries, faster, less memory to build, less stable")
+    variant('link_dylib', default=False, description="Build and link the libLLVM shared library rather than static")
+    variant('all_targets', default=True, description="Build all supported targets, default targets <current arch>,NVPTX,AMDGPU,CppBackend")
 
     # Build dependency
     depends_on('cmake @2.8.12.2:')
@@ -257,6 +259,28 @@ def install(self, spec, prefix):
         if '+compiler-rt' not in spec:
             cmake_args.append('-DLLVM_EXTERNAL_COMPILER_RT_BUILD:Bool=OFF')
 
+        if '+shared_libs' in spec:
+            cmake_args.append('-DBUILD_SHARED_LIBS:Bool=ON')
+
+        if '+link_dylib' in spec:
+            cmake_args.append('-DLLVM_LINK_LLVM_DYLIB:Bool=ON')
+
+        if '+all_targets' not in spec: # all is default on cmake
+            targets = ['CppBackend', 'NVPTX', 'AMDGPU']
+            if 'x86' in spec.architecture.target.lower():
+                targets.append('X86')
+            elif 'arm' in spec.architecture.target.lower():
+                targets.append('ARM')
+            elif 'aarch64' in spec.architecture.target.lower():
+                targets.append('AArch64')
+            elif 'sparc' in spec.architecture.target.lower():
+                targets.append('sparc')
+            elif ('ppc' in spec.architecture.target.lower() or
+                  'power' in spec.architecture.target.lower()):
+                targets.append('PowerPC')
+
+            cmake_args.append('-DLLVM_TARGETS_TO_BUILD:Bool=' + ';'.join(targets))
+
         if  '+clang' not in spec:
             if '+clang_extra' in spec:
                 raise SpackException('The clang_extra variant requires the clang variant to be selected')
@@ -267,7 +291,5 @@ def install(self, spec, prefix):
             cmake(*cmake_args)
             make()
             make("install")
-            query_path = os.path.join('bin', 'clang-query')
-            # Manually install clang-query, because llvm doesn't...
-            if os.path.exists(query_path):
-                shutil.copy(query_path, os.path.join(prefix, 'bin'))
+            cp = which('cp')
+            cp('-a', 'bin/', prefix)
diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
index 0a8b9b4577daf7839bc9a4cad04fbe242012fa8e..7d75866d52fe73ec8f6a4b63ac02734aad49fa3e 100644
--- a/var/spack/repos/builtin/packages/lmod/package.py
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -23,7 +23,7 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-import os
+
 
 class Lmod(Package):
     """
@@ -34,17 +34,25 @@ class Lmod(Package):
     variable. Modulefiles for Library packages provide environment variables
     that specify where the library and header files can be found.
     """
-    homepage = "https://www.tacc.utexas.edu/research-development/tacc-projects/lmod"
-    url      = "http://sourceforge.net/projects/lmod/files/Lmod-6.0.1.tar.bz2/download"
+    homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod'  # NOQA: ignore=E501
+    url = 'https://github.com/TACC/Lmod/archive/6.4.1.tar.gz'
 
+    version('6.4.1', '7978ba777c8aa41a4d8c05fec5f780f4')
+    version('6.3.7', '0fa4d5a24c41cae03776f781aa2dedc1')
     version('6.0.1', '91abf52fe5033bd419ffe2842ebe7af9')
 
-    depends_on("lua@5.2:")
+    depends_on('lua@5.2:')
+    depends_on('lua-luaposix')
+    depends_on('lua-luafilesystem')
+
+    parallel = False
+
+    def setup_environment(self, spack_env, run_env):
+        stage_lua_path = join_path(
+            self.stage.path, 'Lmod-{version}', 'src', '?.lua')
+        spack_env.append_path('LUA_PATH', stage_lua_path.format(
+            version=self.version), separator=';')
 
     def install(self, spec, prefix):
-        # Add our lua to PATH
-        os.environ['PATH'] = spec['lua'].prefix.bin + ';' + os.environ['PATH']
-        
         configure('--prefix=%s' % prefix)
-        make()
-        make("install")
+        make('install')
diff --git a/var/spack/repos/builtin/packages/lua-luafilesystem/package.py b/var/spack/repos/builtin/packages/lua-luafilesystem/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..a61b9dd6753481d2751437d583c0647e21763ec3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lua-luafilesystem/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class LuaLuafilesystem(Package):
+    """
+    LuaFileSystem is a Lua library developed to complement the set of
+    functions related to file systems offered by the standard Lua distribution.
+
+    LuaFileSystem offers a portable way to access the underlying directory
+    structure and file attributes.
+
+    LuaFileSystem is free software and uses the same license as Lua 5.1
+    """
+    homepage = 'http://keplerproject.github.io/luafilesystem'
+    url = 'https://github.com/keplerproject/luafilesystem/archive/v_1_6_3.tar.gz'
+
+    version('1_6_3', 'd0552c7e5a082f5bb2865af63fb9dc95')
+
+    extends('lua')
+
+    def install(self, spec, prefix):
+        rockspec_fmt = join_path(self.stage.path,
+                                 'luafilesystem-v_{version.underscored}',
+                                 'rockspecs',
+                                 'luafilesystem-{version.dotted}-1.rockspec')
+        luarocks('--tree=' + prefix, 'install',
+                 rockspec_fmt.format(version=self.spec.version))
diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py
index 170f90516ab140ea54880052113f9c23dd1832bf..761932361bcfaab65b503e244c6730d1bb087a29 100644
--- a/var/spack/repos/builtin/packages/lua/package.py
+++ b/var/spack/repos/builtin/packages/lua/package.py
@@ -57,7 +57,7 @@ class Lua(Package):
         placement='luarocks')
 
     def install(self, spec, prefix):
-        if spec.satisfies("arch=darwin-i686") or spec.satisfies("arch=darwin-x86_64"):
+        if spec.satisfies("platform=darwin"):
             target = 'macosx'
         else:
             target = 'linux'
@@ -105,6 +105,9 @@ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
         spack_env.set('LUA_PATH', ';'.join(lua_patterns), separator=';')
         spack_env.set('LUA_CPATH', ';'.join(lua_cpatterns), separator=';')
 
+        # Add LUA to PATH for dependent packages
+        spack_env.prepend_path('PATH', self.prefix.bin)
+
         # For run time environment set only the path for extension_spec and
         # prepend it to LUAPATH
         if extension_spec.package.extends(self.spec):
@@ -153,5 +156,5 @@ def setup_dependent_package(self, module, ext_spec):
         """
         # Lua extension builds can have lua and luarocks executable functions
         module.lua = Executable(join_path(self.spec.prefix.bin, 'lua'))
-        module.luarocks = Executable(join_path(self.spec.prefix.bin,
-                                               'luarocks'))
+        module.luarocks = Executable(
+            join_path(self.spec.prefix.bin, 'luarocks'))
diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py
index 061179b78e5a3d0d21213da97dd3fbe8087d3536..c4f2afaff258e99c96ca2074e016de9857b2c93d 100644
--- a/var/spack/repos/builtin/packages/metis/package.py
+++ b/var/spack/repos/builtin/packages/metis/package.py
@@ -24,55 +24,61 @@
 ##############################################################################
 
 from spack import *
-import glob, sys, os
+import glob
+import sys
+import os
+
 
 class Metis(Package):
-    """
-    METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, and producing fill
-    reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the multilevel
-    recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes.
-    """
+    """METIS is a set of serial programs for partitioning graphs, partitioning
+       finite element meshes, and producing fill reducing orderings for sparse
+       matrices. The algorithms implemented in METIS are based on the
+       multilevel recursive-bisection, multilevel k-way, and multi-constraint
+       partitioning schemes."""
 
-    homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview'
-    url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
+    homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview"
+    base_url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis"
 
-    version('5.1.0', '5465e67079419a69e0116de24fce58fe',
-            url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz')
-    version('4.0.3', '5efa35de80703c1b2c4d0de080fafbcf4e0d363a21149a1ad2f96e0144841a55',
-            url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD/metis-4.0.3.tar.gz')
+    version('5.1.0', '5465e67079419a69e0116de24fce58fe')
+    version('5.0.2', 'acb521a4e8c2e6dd559a7f9abd0468c5')
+    version('4.0.3', 'd3848b454532ef18dc83e4fb160d1e10')
 
     variant('shared', default=True, description='Enables the build of shared libraries')
     variant('debug', default=False, description='Builds the library in debug mode')
     variant('gdb', default=False, description='Enables gdb support')
 
     variant('idx64', default=False, description='Use int64_t as default index type')
-    variant('double', default=False, description='Use double precision floating point types')
+    variant('real64', default=False, description='Use double precision floating point types')
 
-    depends_on('cmake @2.8:', when='@5:')  # build-time dependency
-    depends_on('gdb', when='+gdb')
+    depends_on('cmake@2.8:', when='@5:')  # build-time dependency
 
     patch('install_gklib_defs_rename.patch', when='@5:')
 
+    def url_for_version(self, version):
+        verdir = 'OLD/' if version < Version('4.0.3') else ''
+        return '%s/%smetis-%s.tar.gz' % (Metis.base_url, verdir, version)
 
-    @when('@4:4.0.3')
+    @when('@:4')
     def install(self, spec, prefix):
-
-        if '+gdb' in spec:
-            raise InstallError('gdb support not implemented in METIS 4!')
-        if '+idx64' in spec:
-            raise InstallError('idx64 option not implemented in METIS 4!')
-        if '+double' in spec:
-            raise InstallError('double option not implemented for METIS 4!')
+        # Process library spec and options
+        unsupp_vars = [v for v in ('+gdb', '+idx64', '+real64') if v in spec]
+        if unsupp_vars:
+            msg = 'Given variants %s are unsupported by METIS 4!' % unsupp_vars
+            raise InstallError(msg)
 
         options = ['COPTIONS=-fPIC']
         if '+debug' in spec:
             options.append('OPTFLAGS=-g -O0')
         make(*options)
 
+        # Compile and install library files
+        ccompile = Executable(self.compiler.cc)
+
         mkdir(prefix.bin)
-        for x in ('pmetis', 'kmetis', 'oemetis', 'onmetis', 'partnmesh',
-                  'partdmesh', 'mesh2nodal', 'mesh2dual', 'graphchk'):
-            install(x, prefix.bin)
+        binfiles = ('pmetis', 'kmetis', 'oemetis', 'onmetis', 'partnmesh',
+                    'partdmesh', 'mesh2nodal', 'mesh2dual', 'graphchk')
+        for binfile in binfiles:
+            install(binfile, prefix.bin)
 
         mkdir(prefix.lib)
         install('libmetis.a', prefix.lib)
@@ -82,106 +88,120 @@ def install(self, spec, prefix):
             install(h, prefix.include)
 
         mkdir(prefix.share)
-        for f in (join_path(*p)
-                  for p in (('Programs', 'io.c'),
-                            ('Test','mtest.c'),
-                            ('Graphs','4elt.graph'),
-                            ('Graphs', 'metis.mesh'),
-                            ('Graphs', 'test.mgraph'))):
-            install(f, prefix.share)
+        sharefiles = (('Graphs', '4elt.graph'), ('Graphs', 'metis.mesh'),
+                      ('Graphs', 'test.mgraph'))
+        for sharefile in tuple(join_path(*sf) for sf in sharefiles):
+            install(sharefile, prefix.share)
 
         if '+shared' in spec:
+            shared_flags = ['-fPIC', '-shared']
             if sys.platform == 'darwin':
-                lib_dsuffix = 'dylib'
-                load_flag = '-Wl,-all_load'
-                no_load_flag = ''
+                shared_suffix = 'dylib'
+                shared_flags.extend(['-Wl,-all_load', 'libmetis.a'])
             else:
-                lib_dsuffix = 'so'
-                load_flag = '-Wl,-whole-archive'
-                no_load_flag = '-Wl,-no-whole-archive'
+                shared_suffix = 'so'
+                shared_flags.extend(['-Wl,-whole-archive', 'libmetis.a',
+                                     '-Wl,-no-whole-archive'])
 
-            os.system(spack_cc + ' -fPIC -shared ' + load_flag +
-                      ' libmetis.a ' + no_load_flag + ' -o libmetis.' +
-                      lib_dsuffix)
-            install('libmetis.' + lib_dsuffix, prefix.lib)
+            shared_out = '%s/libmetis.%s' % (prefix.lib, shared_suffix)
+            shared_flags.extend(['-o', shared_out])
 
-        # Set up and run tests on installation
-        symlink(join_path(prefix.share, 'io.c'), 'io.c')
-        symlink(join_path(prefix.share, 'mtest.c'), 'mtest.c')
-        os.system(spack_cc + ' -I%s' % prefix.include + ' -c io.c')
-        os.system(spack_cc + ' -I%s' % prefix.include +
-                  ' -L%s' % prefix.lib + ' -lmetis mtest.c io.o -o mtest')
-        _4eltgraph = join_path(prefix.share, '4elt.graph')
-        test_mgraph = join_path(prefix.share, 'test.mgraph')
-        metis_mesh = join_path(prefix.share, 'metis.mesh')
-        kmetis = join_path(prefix.bin, 'kmetis')
-        os.system('./mtest ' + _4eltgraph)
-        os.system(kmetis + ' ' + _4eltgraph + ' 40')
-        os.system(join_path(prefix.bin, 'onmetis') + ' ' + _4eltgraph)
-        os.system(join_path(prefix.bin, 'pmetis') + ' ' + test_mgraph + ' 2')
-        os.system(kmetis + ' ' + test_mgraph + ' 2')
-        os.system(kmetis + ' ' + test_mgraph + ' 5')
-        os.system(join_path(prefix.bin, 'partnmesh') + metis_mesh + ' 10')
-        os.system(join_path(prefix.bin, 'partdmesh') + metis_mesh + ' 10')
-        os.system(join_path(prefix.bin, 'mesh2dual') + metis_mesh)
+            ccompile(*shared_flags)
 
+        # Set up and run tests on installation
+        ccompile('-I%s' % prefix.include, '-L%s' % prefix.lib,
+                 '-Wl,-rpath=%s' % (prefix.lib if '+shared' in spec else ''),
+                 join_path('Programs', 'io.o'), join_path('Test', 'mtest.c'),
+                 '-o', '%s/mtest' % prefix.bin, '-lmetis', '-lm')
+
+        test_bin = lambda testname: join_path(prefix.bin, testname)
+        test_graph = lambda graphname: join_path(prefix.share, graphname)
+
+        graph = test_graph('4elt.graph')
+        os.system('%s %s' % (test_bin('mtest'), graph))
+        os.system('%s %s 40' % (test_bin('kmetis'), graph))
+        os.system('%s %s' % (test_bin('onmetis'), graph))
+        graph = test_graph('test.mgraph')
+        os.system('%s %s 2' % (test_bin('pmetis'), graph))
+        os.system('%s %s 2' % (test_bin('kmetis'), graph))
+        os.system('%s %s 5' % (test_bin('kmetis'), graph))
+        graph = test_graph('metis.mesh')
+        os.system('%s %s 10' % (test_bin('partnmesh'), graph))
+        os.system('%s %s 10' % (test_bin('partdmesh'), graph))
+        os.system('%s %s' % (test_bin('mesh2dual'), graph))
+
+        # FIXME: The following code should replace the testing code in the
+        # block above since it causes installs to fail when one or more of the
+        # Metis tests fail, but it currently doesn't work because the 'mtest',
+        # 'onmetis', and 'partnmesh' tests return error codes that trigger
+        # false positives for failure.
+        """
+        Executable(test_bin('mtest'))(test_graph('4elt.graph'))
+        Executable(test_bin('kmetis'))(test_graph('4elt.graph'), '40')
+        Executable(test_bin('onmetis'))(test_graph('4elt.graph'))
+
+        Executable(test_bin('pmetis'))(test_graph('test.mgraph'), '2')
+        Executable(test_bin('kmetis'))(test_graph('test.mgraph'), '2')
+        Executable(test_bin('kmetis'))(test_graph('test.mgraph'), '5')
+
+        Executable(test_bin('partnmesh'))(test_graph('metis.mesh'), '10')
+        Executable(test_bin('partdmesh'))(test_graph('metis.mesh'), '10')
+        Executable(test_bin('mesh2dual'))(test_graph('metis.mesh'))
+        """
 
     @when('@5:')
     def install(self, spec, prefix):
-
         options = []
         options.extend(std_cmake_args)
 
         build_directory = join_path(self.stage.path, 'spack-build')
         source_directory = self.stage.source_path
 
-        options.append('-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=source_directory))
+        options.append('-DGKLIB_PATH:PATH=%s/GKlib' % source_directory)
         options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
 
         if '+shared' in spec:
             options.append('-DSHARED:BOOL=ON')
-
         if '+debug' in spec:
             options.extend(['-DDEBUG:BOOL=ON',
                             '-DCMAKE_BUILD_TYPE:STRING=Debug'])
-
         if '+gdb' in spec:
             options.append('-DGDB:BOOL=ON')
 
         metis_header = join_path(source_directory, 'include', 'metis.h')
-
         if '+idx64' in spec:
             filter_file('IDXTYPEWIDTH 32', 'IDXTYPEWIDTH 64', metis_header)
-
-        if '+double' in spec:
+        if '+real64' in spec:
             filter_file('REALTYPEWIDTH 32', 'REALTYPEWIDTH 64', metis_header)
 
         # Make clang 7.3 happy.
         # Prevents "ld: section __DATA/__thread_bss extends beyond end of file"
         # See upstream LLVM issue https://llvm.org/bugs/show_bug.cgi?id=27059
-        # Adopted from https://github.com/Homebrew/homebrew-science/blob/master/metis.rb
+        # and https://github.com/Homebrew/homebrew-science/blob/master/metis.rb
         if spec.satisfies('%clang@7.3.0'):
-            filter_file('#define MAX_JBUFS 128', '#define MAX_JBUFS 24', join_path(source_directory, 'GKlib', 'error.c'))
+            filter_file('#define MAX_JBUFS 128', '#define MAX_JBUFS 24',
+                        join_path(source_directory, 'GKlib', 'error.c'))
 
         with working_dir(build_directory, create=True):
             cmake(source_directory, *options)
             make()
-            make("install")
+            make('install')
+
             # now run some tests:
-            for f in ["4elt", "copter2", "mdual"]:
-                graph = join_path(source_directory,'graphs','%s.graph' % f)
-                Executable(join_path(prefix.bin,'graphchk'))(graph)
-                Executable(join_path(prefix.bin,'gpmetis'))(graph,'2')
-                Executable(join_path(prefix.bin,'ndmetis'))(graph)
+            for f in ['4elt', 'copter2', 'mdual']:
+                graph = join_path(source_directory, 'graphs', '%s.graph' % f)
+                Executable(join_path(prefix.bin, 'graphchk'))(graph)
+                Executable(join_path(prefix.bin, 'gpmetis'))(graph, '2')
+                Executable(join_path(prefix.bin, 'ndmetis'))(graph)
 
-            graph = join_path(source_directory,'graphs','test.mgraph')
-            Executable(join_path(prefix.bin,'gpmetis'))(graph,'2')
-            graph = join_path(source_directory,'graphs','metis.mesh')
-            Executable(join_path(prefix.bin,'mpmetis'))(graph,'2')
+            graph = join_path(source_directory, 'graphs', 'test.mgraph')
+            Executable(join_path(prefix.bin, 'gpmetis'))(graph, '2')
+            graph = join_path(source_directory, 'graphs', 'metis.mesh')
+            Executable(join_path(prefix.bin, 'mpmetis'))(graph, '2')
 
             # install GKlib headers, which will be needed for ParMETIS
-            GKlib_dist = join_path(prefix.include,'GKlib')
+            GKlib_dist = join_path(prefix.include, 'GKlib')
             mkdirp(GKlib_dist)
-            fs = glob.glob(join_path(source_directory,'GKlib',"*.h"))
-            for f in fs:
-                install(f, GKlib_dist)
+            hfiles = glob.glob(join_path(source_directory, 'GKlib', '*.h'))
+            for hfile in hfiles:
+                install(hfile, GKlib_dist)
diff --git a/var/spack/repos/builtin/packages/mkl/package.py b/var/spack/repos/builtin/packages/mkl/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..454e78d29ca4f8d0b211ec53b259c505930227f1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mkl/package.py
@@ -0,0 +1,28 @@
+from spack import *
+import os
+
+from spack.pkg.builtin.intel import IntelInstaller
+
+
+class Mkl(IntelInstaller):
+    """Intel Math Kernel Library.
+
+    Note: You will have to add the download file to a
+    mirror so that Spack can find it. For instructions on how to set up a
+    mirror, see http://software.llnl.gov/spack/mirrors.html"""
+
+    homepage = "https://software.intel.com/en-us/intel-mkl"
+
+    version('11.3.2.181', '536dbd82896d6facc16de8f961d17d65',
+            url="file://%s/l_mkl_11.3.2.181.tgz" % os.getcwd())
+    version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34',
+            url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd())
+
+    def install(self, spec, prefix):
+
+        self.intel_prefix = os.path.join(prefix, "pkg")
+        IntelInstaller.install(self, spec, prefix)
+
+        mkl_dir = os.path.join(self.intel_prefix, "mkl")
+        for f in os.listdir(mkl_dir):
+            os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f))
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
index f4997bdfa1924f59afda0980b643e86da7304893..d1944023d1773e3f358fa1bd64b70c73a3c046fc 100644
--- a/var/spack/repos/builtin/packages/mvapich2/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -25,6 +25,7 @@
 from spack import *
 import os
 
+
 class Mvapich2(Package):
     """MVAPICH2 is an MPI implementation for Infiniband networks."""
     homepage = "http://mvapich.cse.ohio-state.edu/"
@@ -43,8 +44,9 @@ class Mvapich2(Package):
     variant('debug', default=False, description='Enables debug information and error messages at run-time')
 
     ##########
-    # TODO : Process managers should be grouped into the same variant, as soon as variant capabilities will be extended
-    # See https://groups.google.com/forum/#!topic/spack/F8-f8B4_0so
+    # TODO : Process managers should be grouped into the same variant,
+    # as soon as variant capabilities will be extended. See
+    # https://groups.google.com/forum/#!topic/spack/F8-f8B4_0so
     SLURM = 'slurm'
     HYDRA = 'hydra'
     GFORKER = 'gforker'
@@ -57,7 +59,8 @@ class Mvapich2(Package):
     ##########
 
     ##########
-    # TODO : Network types should be grouped into the same variant, as soon as variant capabilities will be extended
+    # TODO : Network types should be grouped into the same variant, as
+    # soon as variant capabilities will be extended.
     PSM = 'psm'
     SOCK = 'sock'
     NEMESISIBTCP = 'nemesisibtcp'
@@ -84,8 +87,8 @@ def url_for_version(self, version):
 
     @staticmethod
     def enabled(x):
-        """
-        Given a variant name returns the string that means the variant is enabled
+        """Given a variant name, returns the string that means the variant is
+        enabled
 
         :param x: variant name
         :return:
@@ -93,8 +96,8 @@ def enabled(x):
         return '+' + x
 
     def set_build_type(self, spec, configure_args):
-        """
-        Appends to configure_args the flags that depends only on the build type (i.e. release or debug)
+        """Appends to configure_args the flags that depend only on the build
+        type (i.e. release or debug)
 
         :param spec: spec
         :param configure_args: list of current configure arguments
@@ -104,7 +107,8 @@ def set_build_type(self, spec, configure_args):
                 "--disable-fast",
                 "--enable-error-checking=runtime",
                 "--enable-error-messages=all",
-                "--enable-g=dbg", "--enable-debuginfo"  # Permits debugging with TotalView
+                # Permits debugging with TotalView
+                "--enable-g=dbg", "--enable-debuginfo"
             ]
         else:
             build_type_options = ["--enable-fast=all"]
@@ -112,25 +116,41 @@ def set_build_type(self, spec, configure_args):
         configure_args.extend(build_type_options)
 
     def set_process_manager(self, spec, configure_args):
-        """
-        Appends to configure_args the flags that will enable the appropriate process managers
+        """Appends to configure_args the flags that will enable the
+        appropriate process managers
 
         :param spec: spec
         :param configure_args: list of current configure arguments
         """
-        # Check that slurm variant is not activated together with other pm variants
-        has_slurm_incompatible_variants = any(self.enabled(x) in spec for x in Mvapich2.SLURM_INCOMPATIBLE_PMS)
-        if self.enabled(Mvapich2.SLURM) in spec and has_slurm_incompatible_variants:
-            raise RuntimeError(" %s : 'slurm' cannot be activated together with other process managers" % self.name)
+        # Check that slurm variant is not activated together with
+        # other pm variants
+        has_slurm_incompatible_variants = \
+            any(self.enabled(x) in spec
+                for x in Mvapich2.SLURM_INCOMPATIBLE_PMS)
+
+        if self.enabled(Mvapich2.SLURM) in spec and \
+           has_slurm_incompatible_variants:
+            raise RuntimeError(" %s : 'slurm' cannot be activated \
+            together with other process managers" % self.name)
 
         process_manager_options = []
+        # See: http://slurm.schedmd.com/mpi_guide.html#mvapich2
         if self.enabled(Mvapich2.SLURM) in spec:
-            process_manager_options = [
-                "--with-pm=slurm"
-            ]
+            if self.version > Version('2.0'):
+                process_manager_options = [
+                    "--with-pmi=pmi2",
+                    "--with-pm=slurm"
+                ]
+            else:
+                process_manager_options = [
+                    "--with-pmi=slurm",
+                    "--with-pm=no"
+                ]
+
         elif has_slurm_incompatible_variants:
             pms = []
-            # The variant name is equal to the process manager name in the configuration options
+            # The variant name is equal to the process manager name in
+            # the configuration options
             for x in Mvapich2.SLURM_INCOMPATIBLE_PMS:
                 if self.enabled(x) in spec:
                     pms.append(x)
@@ -146,7 +166,9 @@ def set_network_type(self, spec, configure_args):
             if self.enabled(x) in spec:
                 count += 1
         if count > 1:
-            raise RuntimeError('network variants are mutually exclusive (only one can be selected at a time)')
+            raise RuntimeError('network variants are mutually exclusive \
+            (only one can be selected at a time)')
+
         network_options = []
         # From here on I can suppose that only one variant has been selected
         if self.enabled(Mvapich2.PSM) in spec:
@@ -164,6 +186,11 @@ def set_network_type(self, spec, configure_args):
 
         configure_args.extend(network_options)
 
+    def setup_environment(self, spack_env, run_env):
+        if self.enabled(Mvapich2.SLURM) in self.spec and \
+           self.version > Version('2.0'):
+            run_env.set('SLURM_MPI_TYPE', 'pmi2')
+
     def setup_dependent_environment(self, spack_env, run_env, extension_spec):
         spack_env.set('MPICH_CC', spack_cc)
         spack_env.set('MPICH_CXX', spack_cxx)
@@ -178,7 +205,8 @@ def setup_dependent_package(self, module, dep_spec):
         self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
 
     def install(self, spec, prefix):
-        # we'll set different configure flags depending on our environment
+        # we'll set different configure flags depending on our
+        # environment
         configure_args = [
             "--prefix=%s" % prefix,
             "--enable-shared",
@@ -208,7 +236,6 @@ def install(self, spec, prefix):
 
         self.filter_compilers()
 
-
     def filter_compilers(self):
         """Run after install to make the MPI compilers use the
            compilers that Spack built the package with.
@@ -228,8 +255,17 @@ def filter_compilers(self):
         spack_f77 = os.environ['F77']
         spack_fc  = os.environ['FC']
 
-        kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True }
-        filter_file('CC="%s"' % spack_cc , 'CC="%s"'  % self.compiler.cc,  mpicc,  **kwargs)
-        filter_file('CXX="%s"'% spack_cxx, 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
-        filter_file('F77="%s"'% spack_f77, 'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
-        filter_file('FC="%s"' % spack_fc , 'FC="%s"'  % self.compiler.fc,  mpif90, **kwargs)
+        kwargs = {
+            'ignore_absent': True,
+            'backup': False,
+            'string': True
+        }
+
+        filter_file('CC="%s"' % spack_cc,
+                    'CC="%s"' % self.compiler.cc, mpicc, **kwargs)
+        filter_file('CXX="%s"' % spack_cxx,
+                    'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
+        filter_file('F77="%s"' % spack_f77,
+                    'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
+        filter_file('FC="%s"' % spack_fc,
+                    'FC="%s"' % self.compiler.fc, mpif90, **kwargs)
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index 22e49daaa7b6162470273a7a77a96a7cc8827eff..d09ebd673903cfbaef523d4c2b52bfa11d5ec544 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -48,6 +48,13 @@ class Openblas(Package):
     patch('make.patch')
 
     def install(self, spec, prefix):
+        # As of 06/2016 there is no mechanism to specify that packages which
+        # depend on Blas/Lapack need C and/or Fortran symbols. For now
+        # require both.
+        if self.compiler.f77 is None:
+            raise InstallError('OpenBLAS requires both C and Fortran ',
+                               'compilers!')
+
         # Configure fails to pick up fortran from FC=/abs/path/to/f77, but
         # works fine with FC=/abs/path/to/gfortran.
         # When mixing compilers make sure that
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index 0e3185db258e98fc5a4a31c142c6af49a5b26a83..be3d1342fcd839c4242ca48c6f60e821ac8b6db1 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -27,6 +27,26 @@
 from spack import *
 
 
+def _verbs_dir():
+    """
+    Try to find the directory where the OpenFabrics verbs package is
+    installed. Return None if not found.
+    """
+    try:
+        # Try to locate Verbs by looking for a utility in the path
+        ibv_devices = which("ibv_devices")
+        # Run it (silently) to ensure it works
+        ibv_devices(output=str, error=str)
+        # Get path to executable
+        path = ibv_devices.exe[0]
+        # Remove executable name and "bin" directory
+        path = os.path.dirname(path)
+        path = os.path.dirname(path)
+        return path
+    except:
+        return None
+
+
 class Openmpi(Package):
     """Open MPI is a project combining technologies and resources from
        several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI)
@@ -54,7 +74,7 @@ class Openmpi(Package):
     variant('psm', default=False, description='Build support for the PSM library.')
     variant('psm2', default=False, description='Build support for the Intel PSM2 library.')
     variant('pmi', default=False, description='Build support for PMI-based launchers')
-    variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
+    variant('verbs', default=_verbs_dir() is not None, description='Build support for OpenFabrics verbs.')
     variant('mxm', default=False, description='Build Mellanox Messaging support')
 
     variant('thread_multiple', default=False, description='Enable MPI_THREAD_MULTIPLE support')
@@ -101,6 +121,13 @@ def verbs(self):
             return 'verbs'
 
     def install(self, spec, prefix):
+        # As of 06/2016 there is no mechanism to specify that packages which
+        # depend on MPI need a C and/or Fortran implementation. For now
+        # require both.
+        if (self.compiler.f77 is None) or (self.compiler.fc is None):
+            raise InstallError('OpenMPI requires both C and Fortran ',
+                               'compilers!')
+
         config_args = ["--prefix=%s" % prefix,
                        "--with-hwloc=%s" % spec['hwloc'].prefix,
                        "--enable-shared",
@@ -113,7 +140,6 @@ def install(self, spec, prefix):
             # Fabrics
             '--with-psm' if '+psm' in spec else '--without-psm',
             '--with-psm2' if '+psm2' in spec else '--without-psm2',
-            ('--with-%s' % self.verbs) if '+verbs' in spec else ('--without-%s' % self.verbs),
             '--with-mxm' if '+mxm' in spec else '--without-mxm',
             # Other options
             '--enable-mpi-thread-multiple' if '+thread_multiple' in spec else '--disable-mpi-thread-multiple',
@@ -121,6 +147,14 @@ def install(self, spec, prefix):
             '--with-sqlite3' if '+sqlite3' in spec else '--without-sqlite3',
             '--enable-vt' if '+vt' in spec else '--disable-vt'
         ])
+        if '+verbs' in spec:
+            path = _verbs_dir()
+            if path is not None and path not in ('/usr', '/usr/local'):
+                config_args.append('--with-%s=%s' % (self.verbs, path))
+            else:
+                config_args.append('--with-%s' % self.verbs)
+        else:
+            config_args.append('--without-%s' % self.verbs)
 
         # TODO: use variants for this, e.g. +lanl, +llnl, etc.
         # use this for LANL builds, but for LLNL builds, we need:
diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py
index 34ab0703ad66c8ad36699ecc5977fa2e71c4b30a..377bffe7234e535f354cc5eab6d33763829cb9bd 100644
--- a/var/spack/repos/builtin/packages/openssl/package.py
+++ b/var/spack/repos/builtin/packages/openssl/package.py
@@ -100,7 +100,7 @@ def install(self, spec, prefix):
         # in the environment, then this will override what is set in the
         # Makefile, leading to build errors.
         env.pop('APPS', None)
-        if spec.satisfies("arch=darwin-x86_64") or spec.satisfies("arch=ppc64"):
+        if spec.satisfies("target=x86_64") or spec.satisfies("target=ppc64"):
             # This needs to be done for all 64-bit architectures (except Linux,
             # where it happens automatically?)
             env['KERNEL_BITS'] = '64'
diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py
index 2dead4a76ad9f2666072697132cd73ba08a88986..9b36f273e4c2a8f243e159c37160281abc3adef2 100644
--- a/var/spack/repos/builtin/packages/parmetis/package.py
+++ b/var/spack/repos/builtin/packages/parmetis/package.py
@@ -26,33 +26,36 @@
 from spack import *
 import sys
 
+
 class Parmetis(Package):
-    """
-    ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning unstructured
-    graphs, meshes, and for computing fill-reducing orderings of sparse matrices.
-    """
+    """ParMETIS is an MPI-based parallel library that implements a variety of
+       algorithms for partitioning unstructured graphs, meshes, and for
+       computing fill-reducing orderings of sparse matrices."""
+
     homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview'
-    url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz'
+    base_url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'
 
     version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628')
+    version('4.0.2', '0912a953da5bb9b5e5e10542298ffdce')
 
     variant('shared', default=True, description='Enables the build of shared libraries')
     variant('debug', default=False, description='Builds the library in debug mode')
     variant('gdb', default=False, description='Enables gdb support')
 
-    depends_on('cmake @2.8:')  # build dependency
+    depends_on('cmake@2.8:')  # build dependency
     depends_on('mpi')
-
-    patch('enable_external_metis.patch')
     depends_on('metis@5:')
 
+    patch('enable_external_metis.patch')
     # bug fixes from PETSc developers
-    # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/
+    # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/  # NOQA: ignore=E501
     patch('pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch')
-    # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/
+    # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/  # NOQA: ignore=E501
     patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch')
 
-    depends_on('gdb', when='+gdb')
+    def url_for_version(self, version):
+        verdir = 'OLD/' if version < Version('3.2.0') else ''
+        return '%s/%sparmetis-%s.tar.gz' % (Parmetis.base_url, verdir, version)
 
     def install(self, spec, prefix):
         options = []
@@ -60,30 +63,27 @@ def install(self, spec, prefix):
 
         build_directory = join_path(self.stage.path, 'spack-build')
         source_directory = self.stage.source_path
-        metis_source = join_path(source_directory, 'metis')
 
-        # FIXME : Once a contract is defined, MPI compilers should be retrieved indirectly via spec['mpi'] in case
-        # FIXME : they use a non-standard name
-        options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=spec['metis'].prefix.include),
-                        '-DMETIS_PATH:PATH={metis_source}'.format(metis_source=spec['metis'].prefix),
-                        '-DCMAKE_C_COMPILER:STRING=mpicc',
-                        '-DCMAKE_CXX_COMPILER:STRING=mpicxx'])
+        options.extend([
+            '-DGKLIB_PATH:PATH=%s/GKlib' % spec['metis'].prefix.include,
+            '-DMETIS_PATH:PATH=%s' % spec['metis'].prefix,
+            '-DCMAKE_C_COMPILER:STRING=%s' % spec['mpi'].mpicc,
+            '-DCMAKE_CXX_COMPILER:STRING=%s' % spec['mpi'].mpicxx
+        ])
 
         if '+shared' in spec:
             options.append('-DSHARED:BOOL=ON')
-
         if '+debug' in spec:
             options.extend(['-DDEBUG:BOOL=ON',
                             '-DCMAKE_BUILD_TYPE:STRING=Debug'])
-
         if '+gdb' in spec:
             options.append('-DGDB:BOOL=ON')
 
         with working_dir(build_directory, create=True):
             cmake(source_directory, *options)
             make()
-            make("install")
+            make('install')
 
-            # The shared library is not installed correctly on Darwin; correct this
+            # The shared library is not installed correctly on Darwin; fix this
             if (sys.platform == 'darwin') and ('+shared' in spec):
                 fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/pcre/intel.patch b/var/spack/repos/builtin/packages/pcre/intel.patch
new file mode 100644
index 0000000000000000000000000000000000000000..f160f55e1bd9e076606fd7a37ad2ea97c87edbac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pcre/intel.patch
@@ -0,0 +1,12 @@
+diff -up pcre-8.38/pcrecpp.cc.intel pcre-8.38/pcrecpp.cc
+--- pcre-8.38/pcrecpp.cc.intel	2014-09-15 07:48:59.000000000 -0600
++++ pcre-8.38/pcrecpp.cc	2016-06-08 16:16:56.702721214 -0600
+@@ -66,7 +66,7 @@ Arg RE::no_arg((void*)NULL);
+ // inclusive test if we ever needed it.  (Note that not only the
+ // __attribute__ syntax, but also __USER_LABEL_PREFIX__, are
+ // gnu-specific.)
+-#if defined(__GNUC__) && __GNUC__ >= 3 && defined(__ELF__)
++#if defined(__GNUC__) && __GNUC__ >= 3 && defined(__ELF__) && !defined(__INTEL_COMPILER)
+ # define ULP_AS_STRING(x)            ULP_AS_STRING_INTERNAL(x)
+ # define ULP_AS_STRING_INTERNAL(x)   #x
+ # define USER_LABEL_PREFIX_STR       ULP_AS_STRING(__USER_LABEL_PREFIX__)
diff --git a/var/spack/repos/builtin/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py
index 7a9f3b911d02010ab489ee70abc2a32588af3b03..8e0f83110e9436278c9ea4ff39f90e14072de02e 100644
--- a/var/spack/repos/builtin/packages/pcre/package.py
+++ b/var/spack/repos/builtin/packages/pcre/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 from spack import *
 
+
 class Pcre(Package):
     """The PCRE package contains Perl Compatible Regular Expression
        libraries. These are useful for implementing regular expression
@@ -34,6 +35,8 @@ class Pcre(Package):
     version('8.36', 'b767bc9af0c20bc9c1fe403b0d41ad97')
     version('8.38', '00aabbfe56d5a48b270f999b508c5ad2')
 
+    patch("intel.patch")
+
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
         make()
diff --git a/var/spack/repos/builtin/packages/py-astropy/package.py b/var/spack/repos/builtin/packages/py-astropy/package.py
index 86875bbcaea172fc7bd99dbc63528bf957d3a220..a9962777dc037489966ccc6c5b2c7da98a1fb03a 100644
--- a/var/spack/repos/builtin/packages/py-astropy/package.py
+++ b/var/spack/repos/builtin/packages/py-astropy/package.py
@@ -24,29 +24,38 @@
 ##############################################################################
 from spack import *
 
+
 class PyAstropy(Package):
-    """
-    The Astropy Project is a community effort to develop a single core
+    """The Astropy Project is a community effort to develop a single core
     package for Astronomy in Python and foster interoperability between
-    Python astronomy packages.
-    """
+    Python astronomy packages."""
+
     homepage = 'http://www.astropy.org/'
+    url = 'https://pypi.python.org/packages/source/a/astropy/astropy-1.1.2.tar.gz'
 
+    version('1.1.2',     'cbe32023b5b1177d1e2498a0d00cda51')
     version('1.1.post1', 'b52919f657a37d45cc45f5cb0f58c44d')
 
-    def url_for_version(self, version):
-        return 'https://pypi.python.org/packages/source/a/astropy/astropy-{0}.tar.gz'.format(version)
-
+    # Required dependencies
     extends('python')
+    depends_on('py-numpy')
 
-    depends_on('cfitsio')
-    depends_on('expat')
+    # Optional dependencies
     depends_on('py-h5py')
-    depends_on('py-numpy')
+    depends_on('py-beautifulsoup4')
+    depends_on('py-pyyaml')
     depends_on('py-scipy')
+    depends_on('libxml2')
+    depends_on('py-matplotlib')
+    depends_on('py-pytz')
+    depends_on('py-scikit-image')
+    depends_on('py-pandas')
+
+    # System dependencies
+    depends_on('cfitsio')
+    depends_on('expat')
 
     def install(self, spec, prefix):
         python('setup.py', 'build', '--use-system-cfitsio',
-                                    '--use-system-expat')
-        python('setup.py', 'install', '--prefix=' + prefix)
-
+               '--use-system-expat')
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py b/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..64368fe70d92966e2f630db6de86fd0e7d48dbc6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBeautifulsoup4(Package):
+    """Beautiful Soup is a Python library for pulling data out of HTML and
+    XML files. It works with your favorite parser to provide idiomatic ways
+    of navigating, searching, and modifying the parse tree."""
+
+    homepage = "https://www.crummy.com/software/BeautifulSoup"
+    url = "https://pypi.python.org/packages/source/b/beautifulsoup4/beautifulsoup4-4.4.1.tar.gz"
+
+    version('4.4.1', '8fbd9a7cac0704645fa20d1419036815')
+
+    extends('python')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-emcee/package.py b/var/spack/repos/builtin/packages/py-emcee/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d57ef4bd76ca0f051ae8c201c3956be47ff2be9f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-emcee/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEmcee(Package):
+    """emcee is an MIT licensed pure-Python implementation of Goodman & Weare's
+    Affine Invariant Markov chain Monte Carlo (MCMC) Ensemble sampler."""
+
+    homepage = "http://dan.iel.fm/emcee/current/"
+    url = "https://pypi.python.org/packages/source/e/emcee/emcee-2.1.0.tar.gz"
+
+    version('2.1.0', 'c6b6fad05c824d40671d4a4fc58dfff7')
+
+    extends('python')
+    depends_on('py-numpy')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py
index de72bac44ea1a2f9f4365e08695df81e1ed33f58..0180b658f5b6e589dc0b8e9b8a447029af7b16aa 100644
--- a/var/spack/repos/builtin/packages/py-h5py/package.py
+++ b/var/spack/repos/builtin/packages/py-h5py/package.py
@@ -23,21 +23,42 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-import re
+
 
 class PyH5py(Package):
-    """The h5py package provides both a high- and low-level interface to the HDF5 library from Python."""
+    """The h5py package provides both a high- and low-level interface to the
+    HDF5 library from Python."""
+
     homepage = "https://pypi.python.org/pypi/h5py"
     url      = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz"
 
-    version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758')
+    version('2.6.0', 'ec476211bd1de3f5ac150544189b0bf4')
     version('2.5.0', '6e4301b5ad5da0d51b0a1e5ac19e3b74')
+    version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758')
+
+    variant('mpi', default=False, description='Build with MPI support')
 
-    extends('python', ignore=lambda f: re.match(r'bin/cy*', f))
-    depends_on('hdf5')
-    depends_on('py-numpy')
-    depends_on('py-cython')
+    extends('python')
+
+    # Build dependencies
+    depends_on('py-cython@0.19:')
+    depends_on('pkg-config')
+    depends_on('hdf5@1.8.4:+mpi', when='+mpi')
+    depends_on('hdf5@1.8.4:~mpi', when='~mpi')
+    depends_on('mpi', when='+mpi')
+
+    # Build and runtime dependencies
+    depends_on('py-numpy@1.6.1:')
+
+    # Runtime dependencies
+    depends_on('py-six')
 
     def install(self, spec, prefix):
-        python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix)
-        python('setup.py', 'install', '--prefix=%s' % prefix)
+        python('setup.py', 'configure',
+               '--hdf5={0}'.format(spec['hdf5'].prefix))
+
+        if '+mpi' in spec:
+            env['CC'] = spec['mpi'].mpicc
+            python('setup.py', 'configure', '--mpi')
+
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-iminuit/package.py b/var/spack/repos/builtin/packages/py-iminuit/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..47751a702d14d8c620f8bc128a64ac48301aa003
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-iminuit/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyIminuit(Package):
+    """Interactive IPython-Friendly Minimizer based on SEAL Minuit2."""
+
+    homepage = "https://pypi.python.org/pypi/iminuit"
+    url      = "https://pypi.python.org/packages/source/i/iminuit/iminuit-1.2.tar.gz"
+
+    version('1.2', '4701ec472cae42015e26251703e6e984')
+
+    # Required dependencies
+    extends('python')
+    depends_on('py-setuptools')
+
+    # Optional dependencies
+    depends_on('py-numpy')
+    depends_on('py-matplotlib')
+    depends_on('py-cython')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-nestle/package.py b/var/spack/repos/builtin/packages/py-nestle/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..16506e3eca0190ebfeb5935bb4300c1ed6976b4c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-nestle/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyNestle(Package):
+    """Nested sampling algorithms for evaluating Bayesian evidence."""
+
+    homepage = "http://kbarbary.github.io/nestle/"
+    url = "https://pypi.python.org/packages/source/n/nestle/nestle-0.1.1.tar.gz"
+
+    version('0.1.1', '4875c0f9a0a8e263c1d7f5fa6ce604c5')
+
+    # Required dependencies
+    extends('python')
+    depends_on('py-numpy')
+
+    # Optional dependencies
+    depends_on('py-scipy')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-ply/package.py b/var/spack/repos/builtin/packages/py-ply/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..47cd3b5dc8458761987e2d4006317daf65a01e85
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ply/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPly(Package):
+    """PLY is nothing more than a straightforward lex/yacc implementation."""
+    homepage = "http://www.dabeaz.com/ply"
+    url      = "http://www.dabeaz.com/ply/ply-3.8.tar.gz"
+
+    version('3.8', '94726411496c52c87c2b9429b12d5c50')
+
+    extends('python')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-sncosmo/package.py b/var/spack/repos/builtin/packages/py-sncosmo/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..82ae2a2e69c0af8a6d9349953615d7018ca6dd0a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sncosmo/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySncosmo(Package):
+    """SNCosmo is a Python library for high-level supernova cosmology
+    analysis."""
+
+    homepage = "http://sncosmo.readthedocs.io/"
+    url = "https://pypi.python.org/packages/source/s/sncosmo/sncosmo-1.2.0.tar.gz"
+
+    version('1.2.0', '028e6d1dc84ab1c17d2f3b6378b2cb1e')
+
+    # Required dependencies
+    # py-sncosmo binaries are duplicates of those from py-astropy
+    extends('python', ignore=r'bin/.*')
+    depends_on('py-numpy')
+    depends_on('py-scipy')
+    depends_on('py-astropy')
+
+    # Recommended dependencies
+    depends_on('py-matplotlib')
+    depends_on('py-iminuit')
+    depends_on('py-emcee')
+    depends_on('py-nestle')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-wcsaxes/package.py b/var/spack/repos/builtin/packages/py-wcsaxes/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0adbe365806020cd90163ee7134df6b5e44bbd0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-wcsaxes/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyWcsaxes(Package):
+    """WCSAxes is a framework for making plots of Astronomical data
+    in Matplotlib."""
+
+    homepage = "http://wcsaxes.readthedocs.io/en/latest/index.html"
+    url      = "https://github.com/astrofrog/wcsaxes/archive/v0.8.tar.gz"
+
+    version('0.8', 'de1c60fdae4c330bf5ddb9f1ab5ab920')
+
+    extends('python', ignore=r'bin/pbr')
+    depends_on('py-numpy')
+    depends_on('py-matplotlib')
+    depends_on('py-astropy')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index 86c12498e1f42359ab124a65676bdbba0c9721d4..b6a62bf742073301534650ef41e4ac587acebcc7 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -22,9 +22,6 @@
 # License along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import functools
-import glob
-import inspect
 import os
 import re
 from contextlib import closing
@@ -37,11 +34,10 @@
 
 class Python(Package):
     """The Python programming language."""
+
     homepage = "http://www.python.org"
     url      = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz"
 
-    extendable = True
-
     version('3.5.1', 'be78e48cdfc1a7ad90efff146dce6cfe')
     version('3.5.0', 'a56c0c0b45d75a0ec9c6dee933c41c36')
     version('2.7.11', '6b6076ec9e93f05dd63e47eb9c15728b', preferred=True)
@@ -49,6 +45,8 @@ class Python(Package):
     version('2.7.9', '5eebcaa0030dc4061156d3429657fb83')
     version('2.7.8', 'd4bca0159acb0b44a781292b5231936f')
 
+    extendable = True
+
     depends_on("openssl")
     depends_on("bzip2")
     depends_on("readline")
@@ -64,39 +62,63 @@ def install(self, spec, prefix):
         # Rest of install is pretty standard except setup.py needs to
         # be able to read the CPPFLAGS and LDFLAGS as it scans for the
         # library and headers to build
-        configure_args= [
-                  "--prefix=%s" % prefix,
-                  "--with-threads",
-                  "--enable-shared",
-                  "CPPFLAGS=-I%s/include -I%s/include -I%s/include -I%s/include -I%s/include -I%s/include" % (
-                       spec['openssl'].prefix, spec['bzip2'].prefix,
-                       spec['readline'].prefix, spec['ncurses'].prefix,
-                       spec['sqlite'].prefix, spec['zlib'].prefix),
-                  "LDFLAGS=-L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib" % (
-                       spec['openssl'].prefix, spec['bzip2'].prefix,
-                       spec['readline'].prefix, spec['ncurses'].prefix,
-                       spec['sqlite'].prefix, spec['zlib'].prefix)
-                  ]
+        cppflags = ' -I'.join([
+            spec['openssl'].prefix.include,  spec['bzip2'].prefix.include,
+            spec['readline'].prefix.include, spec['ncurses'].prefix.include,
+            spec['sqlite'].prefix.include,   spec['zlib'].prefix.include
+        ])
+
+        ldflags = ' -L'.join([
+            spec['openssl'].prefix.lib,  spec['bzip2'].prefix.lib,
+            spec['readline'].prefix.lib, spec['ncurses'].prefix.lib,
+            spec['sqlite'].prefix.lib,   spec['zlib'].prefix.lib
+        ])
+
+        config_args = [
+            "--prefix={0}".format(prefix),
+            "--with-threads",
+            "--enable-shared",
+            "CPPFLAGS=-I{0}".format(cppflags),
+            "LDFLAGS=-L{0}".format(ldflags)
+        ]
+
         if spec.satisfies('@3:'):
-            configure_args.append('--without-ensurepip')
-        configure(*configure_args)
+            config_args.append('--without-ensurepip')
+
+        configure(*config_args)
+
         make()
         make("install")
 
-        # Modify compiler paths in configuration files. This is necessary for
-        # building site packages outside of spack
-        filter_file(r'([/s]=?)([\S=]*)/lib/spack/env(/[^\s/]*)?/(\S*)(\s)',
-                    (r'\4\5'),
-                    join_path(prefix.lib, 'python%d.%d' % self.version[:2], '_sysconfigdata.py'))
+        self.filter_compilers(spec, prefix)
+
+    def filter_compilers(self, spec, prefix):
+        """Run after install to tell the configuration files and Makefiles
+        to use the compilers that Spack built the package with.
+
+        If this isn't done, they'll have CC and CXX set to Spack's generic
+        cc and c++. We want them to be bound to whatever compiler
+        they were built with."""
+
+        kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
 
-        python3_version = ''
+        dirname = join_path(prefix.lib,
+                            'python{0}'.format(self.version.up_to(2)))
+
+        config = 'config'
         if spec.satisfies('@3:'):
-            python3_version = '-%d.%dm' % self.version[:2]
-        makefile_filepath = join_path(prefix.lib, 'python%d.%d' % self.version[:2], 'config%s' % python3_version, 'Makefile')
-        filter_file(r'([/s]=?)([\S=]*)/lib/spack/env(/[^\s/]*)?/(\S*)(\s)',
-                    (r'\4\5'),
-                    makefile_filepath)
+            config = 'config-{0}m'.format(self.version.up_to(2))
+
+        files = [
+            '_sysconfigdata.py',
+            join_path(config, 'Makefile')
+        ]
 
+        for filename in files:
+            filter_file(env['CC'],  self.compiler.cc,
+                        join_path(dirname, filename), **kwargs)
+            filter_file(env['CXX'], self.compiler.cxx,
+                        join_path(dirname, filename), **kwargs)
 
     # ========================================================================
     # Set up environment to make install easy for python extensions.
@@ -104,57 +126,59 @@ def install(self, spec, prefix):
 
     @property
     def python_lib_dir(self):
-        return os.path.join('lib', 'python%d.%d' % self.version[:2])
-
+        return join_path('lib', 'python{0}'.format(self.version.up_to(2)))
 
     @property
     def python_include_dir(self):
-        return os.path.join('include', 'python%d.%d' % self.version[:2])
-
+        return join_path('include', 'python{0}'.format(self.version.up_to(2)))
 
     @property
     def site_packages_dir(self):
-        return os.path.join(self.python_lib_dir, 'site-packages')
-
+        return join_path(self.python_lib_dir, 'site-packages')
 
     def setup_dependent_environment(self, spack_env, run_env, extension_spec):
-        # TODO: do this only for actual extensions.
+        """Set PYTHONPATH to include site-packages dir for the
+        extension and any other python extensions it depends on."""
 
-        # Set PYTHONPATH to include site-packages dir for the
-        # extension and any other python extensions it depends on.
         python_paths = []
         for d in extension_spec.traverse():
             if d.package.extends(self.spec):
-                python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
+                python_paths.append(join_path(d.prefix,
+                                              self.site_packages_dir))
 
         pythonpath = ':'.join(python_paths)
         spack_env.set('PYTHONPATH', pythonpath)
 
-        # For run time environment set only the path for extension_spec and prepend it to PYTHONPATH
+        # For run time environment set only the path for
+        # extension_spec and prepend it to PYTHONPATH
         if extension_spec.package.extends(self.spec):
-            run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir))
-
+            run_env.prepend_path('PYTHONPATH', join_path(
+                extension_spec.prefix, self.site_packages_dir))
 
     def setup_dependent_package(self, module, ext_spec):
-        """
-        Called before python modules' install() methods.
+        """Called before python modules' install() methods.
 
         In most cases, extensions will only need to have one line::
 
-        python('setup.py', 'install', '--prefix=%s' % prefix)
-        """
+        python('setup.py', 'install', '--prefix={0}'.format(prefix))"""
+
         # Python extension builds can have a global python executable function
-        if self.version >= Version("3.0.0") and self.version < Version("4.0.0"):
-            module.python = Executable(join_path(self.spec.prefix.bin, 'python3'))
+        if Version("3.0.0") <= self.version < Version("4.0.0"):
+            module.python = Executable(join_path(self.spec.prefix.bin,
+                                                 'python3'))
         else:
-            module.python = Executable(join_path(self.spec.prefix.bin, 'python'))
+            module.python = Executable(join_path(self.spec.prefix.bin,
+                                                 'python'))
 
         # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
-        module.python_lib_dir     = os.path.join(ext_spec.prefix, self.python_lib_dir)
-        module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir)
-        module.site_packages_dir  = os.path.join(ext_spec.prefix, self.site_packages_dir)
-
-        # Make the site packages directory for extensions, if it does not exist already.
+        module.python_lib_dir     = join_path(ext_spec.prefix,
+                                              self.python_lib_dir)
+        module.python_include_dir = join_path(ext_spec.prefix,
+                                              self.python_include_dir)
+        module.site_packages_dir  = join_path(ext_spec.prefix,
+                                              self.site_packages_dir)
+
+        # Make the site packages directory for extensions
         if ext_spec.package.is_extension:
             mkdirp(module.site_packages_dir)
 
@@ -167,25 +191,28 @@ def python_ignore(self, ext_pkg, args):
         ignore_arg = args.get('ignore', lambda f: False)
 
         # Always ignore easy-install.pth, as it needs to be merged.
-        patterns = [r'easy-install\.pth$']
+        patterns = [r'site-packages/easy-install\.pth$']
 
         # Ignore pieces of setuptools installed by other packages.
+        # Must include directory name or it will remove all site*.py files.
         if ext_pkg.name != 'py-setuptools':
-            patterns.append(r'/site[^/]*\.pyc?$')
-            patterns.append(r'setuptools\.pth')
-            patterns.append(r'bin/easy_install[^/]*$')
-            patterns.append(r'setuptools.*egg$')
+            patterns.extend([
+                r'bin/easy_install[^/]*$',
+                r'site-packages/setuptools[^/]*\.egg$',
+                r'site-packages/setuptools\.pth$',
+                r'site-packages/site[^/]*\.pyc?$',
+                r'site-packages/__pycache__/site[^/]*\.pyc?$'
+            ])
         if ext_pkg.name != 'py-numpy':
             patterns.append(r'bin/f2py$')
 
         return match_predicate(ignore_arg, patterns)
 
-
     def write_easy_install_pth(self, exts):
         paths = []
         for ext in sorted(exts.values()):
-            ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir)
-            easy_pth = "%s/easy-install.pth" % ext_site_packages
+            ext_site_packages = join_path(ext.prefix, self.site_packages_dir)
+            easy_pth = join_path(ext_site_packages, "easy-install.pth")
 
             if not os.path.isfile(easy_pth):
                 continue
@@ -195,15 +222,18 @@ def write_easy_install_pth(self, exts):
                     line = line.rstrip()
 
                     # Skip lines matching these criteria
-                    if not line: continue
-                    if re.search(r'^(import|#)', line): continue
-                    if (ext.name != 'py-setuptools' and
-                        re.search(r'setuptools.*egg$', line)): continue
+                    if not line:
+                        continue
+                    if re.search(r'^(import|#)', line):
+                        continue
+                    if ((ext.name != 'py-setuptools' and
+                         re.search(r'setuptools.*egg$', line))):
+                        continue
 
                     paths.append(line)
 
-        site_packages = os.path.join(self.prefix, self.site_packages_dir)
-        main_pth = "%s/easy-install.pth" % site_packages
+        site_packages = join_path(self.prefix, self.site_packages_dir)
+        main_pth = join_path(site_packages, "easy-install.pth")
 
         if not paths:
             if os.path.isfile(main_pth):
@@ -211,15 +241,22 @@ def write_easy_install_pth(self, exts):
 
         else:
             with closing(open(main_pth, 'w')) as f:
-                f.write("import sys; sys.__plen = len(sys.path)\n")
+                f.write("""
+import sys
+sys.__plen = len(sys.path)
+""")
                 for path in paths:
-                    f.write("%s\n" % path)
-                f.write("import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; "
-                        "p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n")
-
+                    f.write("{0}\n".format(path))
+                f.write("""
+new = sys.path[sys.__plen:]
+del sys.path[sys.__plen:]
+p = getattr(sys, '__egginsert', 0)
+sys.path[p:p] = new
+sys.__egginsert = p + len(new)
+""")
 
     def activate(self, ext_pkg, **args):
-        ignore=self.python_ignore(ext_pkg, args)
+        ignore = self.python_ignore(ext_pkg, args)
         args.update(ignore=ignore)
 
         super(Python, self).activate(ext_pkg, **args)
@@ -228,12 +265,12 @@ def activate(self, ext_pkg, **args):
         exts[ext_pkg.name] = ext_pkg.spec
         self.write_easy_install_pth(exts)
 
-
     def deactivate(self, ext_pkg, **args):
         args.update(ignore=self.python_ignore(ext_pkg, args))
         super(Python, self).deactivate(ext_pkg, **args)
 
         exts = spack.install_layout.extension_map(self.spec)
-        if ext_pkg.name in exts:        # Make deactivate idempotent.
+        # Make deactivate idempotent
+        if ext_pkg.name in exts:
             del exts[ext_pkg.name]
             self.write_easy_install_pth(exts)
diff --git a/var/spack/repos/builtin/packages/rust-bindgen/package.py b/var/spack/repos/builtin/packages/rust-bindgen/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..854016d12a384518380a0bb50b2aea3483f46bd3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rust-bindgen/package.py
@@ -0,0 +1,18 @@
+from spack import *
+import os
+
+
+class RustBindgen(Package):
+    """Automatically generates Rust FFI bindings to C and C++ libraries."""
+    homepage = "http://www.rust-lang.org"
+    url = "https://github.com/crabtw/rust-bindgen"
+
+    version('0.16', tag='0.16', git='https://github.com/crabtw/rust-bindgen')
+
+    extends("rust")
+    depends_on("llvm")
+
+    def install(self, spec, prefix):
+        env = dict(os.environ)
+        env['LIBCLANG_PATH'] = os.path.join(spec['llvm'].prefix, 'lib')
+        cargo('install', '--root', prefix, env=env)
diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..65f81ce534224b61519deaed42f5a67f6efc13e7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rust/package.py
@@ -0,0 +1,63 @@
+from spack import *
+import os
+
+
+def get_submodules():
+    git = which('git')
+    git('submodule', 'update', '--init', '--recursive')
+
+class Rust(Package):
+    """The rust programming language toolchain"""
+    homepage = "http://www.rust-lang.org"
+    url = "https://github.com/rust-lang/rust"
+
+    version('1.8.0', tag='1.8.0', git="https://github.com/rust-lang/rust")
+
+    resource(name='cargo',
+             git="https://github.com/rust-lang/cargo.git",
+             tag='0.10.0',
+             destination='cargo')
+
+    extendable = True
+
+    # Rust
+    depends_on("llvm")
+    depends_on("curl")
+    depends_on("git")
+    depends_on("cmake")
+    depends_on("python@:2.8")
+
+    # Cargo
+    depends_on("openssl")
+
+    def install(self, spec, prefix):
+        configure('--prefix=%s' % prefix,
+                  '--llvm-root=' + spec['llvm'].prefix)
+
+        make()
+        make("install")
+
+        # Install cargo, rust package manager
+        with working_dir(os.path.join('cargo', 'cargo')):
+            get_submodules()
+            configure('--prefix=' + prefix,
+                      '--local-rust-root=' + prefix)
+
+            make()
+            make("install")
+
+    def setup_dependent_package(self, module, ext_spec):
+        """
+        Called before rust extension packages' install() methods.
+
+        In most cases, extensions will only need to have one or two lines::
+
+            cargo('build')
+            cargo('install', '--root', prefix)
+
+        or
+
+            cargo('install', '--root', prefix)
+        """
+        # Rust extension builds can have a global cargo executable function
+        module.cargo = Executable(join_path(self.spec.prefix.bin, 'cargo'))
diff --git a/var/spack/repos/builtin/packages/serf/package.py b/var/spack/repos/builtin/packages/serf/package.py
index 3b1d08889ca1397327ea3d4134c91f7d8b041553..817db682413a7f84b0d0f9f93439a13486d9c378 100644
--- a/var/spack/repos/builtin/packages/serf/package.py
+++ b/var/spack/repos/builtin/packages/serf/package.py
@@ -24,8 +24,10 @@
 ##############################################################################
 from spack import *
 
+
 class Serf(Package):
-    """Apache Serf - a high performance C-based HTTP client library built upon the Apache Portable Runtime (APR) library"""
+    """Apache Serf - a high performance C-based HTTP client library
+    built upon the Apache Portable Runtime (APR) library"""
     homepage  = 'https://serf.apache.org/'
     url       = 'https://archive.apache.org/dist/serf/serf-1.3.8.tar.bz2'
 
@@ -36,6 +38,7 @@ class Serf(Package):
     depends_on('scons')
     depends_on('expat')
     depends_on('openssl')
+    depends_on('zlib')
 
     def install(self, spec, prefix):
         scons = which("scons")
@@ -44,8 +47,10 @@ def install(self, spec, prefix):
         options.append('APR=%s' % spec['apr'].prefix)
         options.append('APU=%s' % spec['apr-util'].prefix)
         options.append('OPENSSL=%s' % spec['openssl'].prefix)
-        options.append('LINKFLAGS=-L%s/lib' % spec['expat'].prefix)
-        options.append('CPPFLAGS=-I%s/include' % spec['expat'].prefix)
+        options.append('LINKFLAGS=-L%s/lib -L%s/lib' %
+                       (spec['expat'].prefix, spec['zlib'].prefix))
+        options.append('CPPFLAGS=-I%s/include -I%s/include' %
+                       (spec['expat'].prefix, spec['zlib'].prefix))
 
         scons(*options)
         scons('install')
diff --git a/var/spack/repos/builtin/packages/stream/package.py b/var/spack/repos/builtin/packages/stream/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b3f32af8a831858dcf0627b0584f11e48f86d2b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/stream/package.py
@@ -0,0 +1,62 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Stream(Package):
+    """The STREAM benchmark is a simple synthetic benchmark program that
+    measures sustainable memory bandwidth (in MB/s) and the corresponding
+    computation rate for simple vector kernels."""
+
+    homepage = "https://www.cs.virginia.edu/stream/ref.html"
+
+    version('5.10', git='https://github.com/jeffhammond/STREAM.git')
+
+    variant('openmp', default=False, description='Build with OpenMP support')
+
+    def patch(self):
+        makefile = FileFilter('Makefile')
+
+        # Use the Spack compiler wrappers
+        makefile.filter('CC = .*', 'CC = cc')
+        makefile.filter('FC = .*', 'FC = f77')
+
+        cflags = '-O2'
+        fflags = '-O2'
+        if '+openmp' in self.spec:
+            cflags += ' ' + self.compiler.openmp_flag
+            fflags += ' ' + self.compiler.openmp_flag
+
+        # Set the appropriate flags for this compiler
+        makefile.filter('CFLAGS = .*', 'CFLAGS = {0}'.format(cflags))
+        makefile.filter('FFLAGS = .*', 'FFLAGS = {0}'.format(fflags))
+
+    def install(self, spec, prefix):
+        make()
+
+        # Manual installation
+        mkdir(prefix.bin)
+        install('stream_c.exe', prefix.bin)
+        install('stream_f.exe', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/tetgen/package.py b/var/spack/repos/builtin/packages/tetgen/package.py
index 5e87ed7fbabd8b199c1af89aa2c6d37dd4fe72d9..c301a5b4e5b01861e881011924b3de66aae325d8 100644
--- a/var/spack/repos/builtin/packages/tetgen/package.py
+++ b/var/spack/repos/builtin/packages/tetgen/package.py
@@ -24,16 +24,19 @@
 ##############################################################################
 from spack import *
 
+
 class Tetgen(Package):
-    """TetGen is a program and library that can be used to generate tetrahedral
-       meshes for given 3D polyhedral domains. TetGen generates exact constrained
-       Delaunay tetrahedralizations, boundary conforming Delaunay meshes, and
-       Voronoi paritions."""
+    """TetGen is a program and library that can be used to generate
+       tetrahedral meshes for given 3D polyhedral domains. TetGen
+       generates exact constrained Delaunay tetrahedralizations,
+       boundary conforming Delaunay meshes, and Voronoi partitions.
+    """
 
     homepage = "http://www.tetgen.org"
     url      = "http://www.tetgen.org/files/tetgen1.4.3.tar.gz"
 
     version('1.4.3', 'd6a4bcdde2ac804f7ec66c29dcb63c18')
+    version('1.5.0', '3b9fd9cdec121e52527b0308f7aad5c1', url='http://www.tetgen.org/1.5/src/tetgen1.5.0.tar.gz')
 
     # TODO: Make this a build dependency once build dependencies are supported
     # (see: https://github.com/LLNL/spack/pull/378).
diff --git a/var/spack/repos/builtin/packages/tmux/package.py b/var/spack/repos/builtin/packages/tmux/package.py
index c46425c0d3630c8d990f0bc402f7c1134ddcba82..573ee38a794befb7f0703904fd716846dfc6db5c 100644
--- a/var/spack/repos/builtin/packages/tmux/package.py
+++ b/var/spack/repos/builtin/packages/tmux/package.py
@@ -24,26 +24,33 @@
 ##############################################################################
 from spack import *
 
+
 class Tmux(Package):
     """tmux is a terminal multiplexer. What is a terminal multiplexer? It lets
-       you switch easily between several programs in one terminal, detach them (they
-       keep running in the background) and reattach them to a different terminal. And
-       do a lot more.
+       you switch easily between several programs in one terminal, detach them
+       (they keep running in the background) and reattach them to a different
+       terminal. And do a lot more.
     """
 
     homepage = "http://tmux.github.io"
-    url = "https://github.com/tmux/tmux/releases/download/2.1/tmux-2.1.tar.gz"
+    url = "https://github.com/tmux/tmux/releases/download/2.2/tmux-2.2.tar.gz"
 
     version('1.9a', 'b07601711f96f1d260b390513b509a2d')
     version('2.1', '74a2855695bccb51b6e301383ad4818c')
+    version('2.2', 'bd95ee7205e489c62c616bb7af040099')
 
     depends_on('libevent')
     depends_on('ncurses')
 
     def install(self, spec, prefix):
+        pkg_config_path = ':'.join([
+            spec['libevent'].prefix,
+            spec['ncurses'].prefix
+        ])
+
         configure(
             "--prefix=%s" % prefix,
-            "PKG_CONFIG_PATH=%s:%s" % (spec['libevent'].prefix, spec['ncurses'].prefix))
+            "PKG_CONFIG_PATH=%s" % pkg_config_path)
 
         make()
         make("install")
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
index 1eaec86405097b23369fe2fe5ac6261ecdab4a74..6913d79dcc9cf83d5796eeeb5572d8b1ce3b0cbc 100644
--- a/var/spack/repos/builtin/packages/trilinos/package.py
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -23,18 +23,23 @@
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack import *
-import os, sys, glob
+import os
+import sys
 
-# Trilinos is complicated to build, as an inspiration a couple of links to other repositories which build it:
+# Trilinos is complicated to build, as an inspiration a couple of links to
+# other repositories which build it:
 # https://github.com/hpcugent/easybuild-easyblocks/blob/master/easybuild/easyblocks/t/trilinos.py#L111
 # https://github.com/koecher/candi/blob/master/deal.II-toolchain/packages/trilinos.package
 # https://gitlab.com/configurations/cluster-config/blob/master/trilinos.sh
-# https://github.com/Homebrew/homebrew-science/blob/master/trilinos.rb
-# and some relevant documentation/examples:
+# https://github.com/Homebrew/homebrew-science/blob/master/trilinos.rb and some
+# relevant documentation/examples:
 # https://github.com/trilinos/Trilinos/issues/175
+
+
 class Trilinos(Package):
-    """The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented
-    software framework for the solution of large-scale, complex multi-physics engineering and scientific problems.
+    """The Trilinos Project is an effort to develop algorithms and enabling
+    technologies within an object-oriented software framework for the solution
+    of large-scale, complex multi-physics engineering and scientific problems.
     A unique design feature of Trilinos is its focus on packages.
     """
     homepage = "https://trilinos.org/"
@@ -54,49 +59,51 @@ class Trilinos(Package):
     variant('hypre',        default=True,  description='Compile with Hypre preconditioner')
     variant('hdf5',         default=True,  description='Compile with HDF5')
     variant('suite-sparse', default=True,  description='Compile with SuiteSparse solvers')
-    # not everyone has py-numpy activated, keep it disabled by default to avoid configure errors
+    # not everyone has py-numpy activated, keep it disabled by default to avoid
+    # configure errors
     variant('python',       default=False, description='Build python wrappers')
     variant('shared',       default=True,  description='Enables the build of shared libraries')
     variant('debug',        default=False, description='Builds a debug version of the libraries')
+    variant('boost',        default=True, description='Compile with Boost')
 
     # Everything should be compiled with -fpic
     depends_on('blas')
     depends_on('lapack')
-    depends_on('boost')
+    depends_on('boost', when='+boost')
     depends_on('matio')
     depends_on('glm')
     depends_on('swig')
-    depends_on('metis@5:',when='+metis')
-    depends_on('suite-sparse',when='+suite-sparse')
+    depends_on('metis@5:', when='+metis')
+    depends_on('suite-sparse', when='+suite-sparse')
 
     # MPI related dependencies
     depends_on('mpi')
     depends_on('netcdf+mpi')
-    depends_on('parmetis',when='+metis')
-    # Trilinos' Tribits config system is limited which makes it
-    # very tricky to link Amesos with static MUMPS, see
+    depends_on('parmetis', when='+metis')
+    # Trilinos' Tribits config system is limited which makes it very tricky to
+    # link Amesos with static MUMPS, see
     # https://trilinos.org/docs/dev/packages/amesos2/doc/html/classAmesos2_1_1MUMPS.html
-    # One could work it out by getting linking flags from mpif90 --showme:link (or alike)
-    # and adding results to -DTrilinos_EXTRA_LINK_FLAGS
-    # together with Blas and Lapack and ScaLAPACK and Blacs and -lgfortran and
-    # it may work at the end. But let's avoid all this by simply using shared libs
-    depends_on('mumps@5.0:+mpi+shared',when='+mumps')
-    depends_on('scalapack',when='+mumps')
-    depends_on('superlu-dist',when='+superlu-dist')
-    depends_on('hypre~internal-superlu',when='+hypre')
-    depends_on('hdf5+mpi',when='+hdf5')
-
-    depends_on('python',when='+python')
+    # One could work it out by getting linking flags from mpif90 --showme:link
+    # (or alike) and adding results to -DTrilinos_EXTRA_LINK_FLAGS together
+    # with Blas and Lapack and ScaLAPACK and Blacs and -lgfortran and it may
+    # work at the end. But let's avoid all this by simply using shared libs
+    depends_on('mumps@5.0:+mpi+shared', when='+mumps')
+    depends_on('scalapack', when='+mumps')
+    depends_on('superlu-dist', when='+superlu-dist')
+    depends_on('hypre~internal-superlu', when='+hypre')
+    depends_on('hdf5+mpi', when='+hdf5')
+    depends_on('python', when='+python')
 
     patch('umfpack_from_suitesparse.patch')
 
     # check that the combination of variants makes sense
     def variants_check(self):
         if '+superlu-dist' in self.spec and self.spec.satisfies('@:11.4.3'):
-            # For Trilinos v11 we need to force SuperLUDist=OFF,
-            # since only the deprecated SuperLUDist v3.3 together with an Amesos patch
-            # is working.
-            raise RuntimeError('The superlu-dist variant can only be used with Trilinos @12.0.1:')
+            # For Trilinos v11 we need to force SuperLUDist=OFF, since only the
+            # deprecated SuperLUDist v3.3 together with an Amesos patch is
+            # working.
+            raise RuntimeError('The superlu-dist variant can only be used' +
+                               ' with Trilinos @12.0.1:')
 
     def install(self, spec, prefix):
         self.variants_check()
@@ -106,54 +113,75 @@ def install(self, spec, prefix):
         options.extend(std_cmake_args)
 
         mpi_bin = spec['mpi'].prefix.bin
-        options.extend(['-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON',
-                        '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON',
-                        '-DTrilinos_VERBOSE_CONFIGURE:BOOL=OFF',
-                        '-DTrilinos_ENABLE_TESTS:BOOL=OFF',
-                        '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF',
-                        '-DCMAKE_BUILD_TYPE:STRING=%s' % ('DEBUG' if '+debug' in spec else 'RELEASE'),
-                        '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'),
-                        '-DTPL_ENABLE_MPI:BOOL=ON',
-                        '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
-                        '-DTPL_ENABLE_BLAS=ON',
-                        '-DBLAS_LIBRARY_NAMES=blas', # FIXME: don't hardcode names
-                        '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib,
-                        '-DTPL_ENABLE_LAPACK=ON',
-                        '-DLAPACK_LIBRARY_NAMES=lapack',
-                        '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix,
-                        '-DTPL_ENABLE_Boost:BOOL=ON',
-                        '-DBoost_INCLUDE_DIRS:PATH=%s' % spec['boost'].prefix.include,
-                        '-DBoost_LIBRARY_DIRS:PATH=%s' % spec['boost'].prefix.lib,
-                        '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON',
-                        '-DTrilinos_ENABLE_CXX11:BOOL=ON',
-                        '-DTPL_ENABLE_Netcdf:BOOL=ON',
-                        '-DTPL_ENABLE_HYPRE:BOOL=%s' % ('ON' if '+hypre' in spec else 'OFF'),
-                        '-DTPL_ENABLE_HDF5:BOOL=%s' % ('ON' if '+hdf5' in spec else 'OFF'),
-                        ])
+        options.extend([
+            '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON',
+            '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON',
+            '-DTrilinos_VERBOSE_CONFIGURE:BOOL=OFF',
+            '-DTrilinos_ENABLE_TESTS:BOOL=OFF',
+            '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF',
+            '-DCMAKE_BUILD_TYPE:STRING=%s' % (
+                'DEBUG' if '+debug' in spec else 'RELEASE'),
+            '-DBUILD_SHARED_LIBS:BOOL=%s' % (
+                'ON' if '+shared' in spec else 'OFF'),
+            '-DTPL_ENABLE_MPI:BOOL=ON',
+            '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
+            '-DTPL_ENABLE_BLAS=ON',
+            '-DBLAS_LIBRARY_NAMES=blas',  # FIXME: don't hardcode names
+            '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib,
+            '-DTPL_ENABLE_LAPACK=ON',
+            '-DLAPACK_LIBRARY_NAMES=lapack',
+            '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix,
+            '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON',
+            '-DTrilinos_ENABLE_CXX11:BOOL=ON',
+            '-DTPL_ENABLE_Netcdf:BOOL=ON',
+            '-DTPL_ENABLE_HYPRE:BOOL=%s' % (
+                'ON' if '+hypre' in spec else 'OFF'),
+            '-DTPL_ENABLE_HDF5:BOOL=%s' % (
+                'ON' if '+hdf5' in spec else 'OFF'),
+        ])
+
+        if '+boost' in spec:
+            options.extend([
+                '-DTPL_ENABLE_Boost:BOOL=ON',
+                '-DBoost_INCLUDE_DIRS:PATH=%s' % spec['boost'].prefix.include,
+                '-DBoost_LIBRARY_DIRS:PATH=%s' % spec['boost'].prefix.lib
+            ])
+        else:
+            options.extend(['-DTPL_ENABLE_Boost:BOOL=OFF'])
 
         # Fortran lib
-        libgfortran = os.path.dirname (os.popen('%s --print-file-name libgfortran.a' % join_path(mpi_bin,'mpif90') ).read())
+        libgfortran = os.path.dirname(os.popen(
+            '%s --print-file-name libgfortran.a' %
+            join_path(mpi_bin, 'mpif90')).read())
         options.extend([
-            '-DTrilinos_EXTRA_LINK_FLAGS:STRING=-L%s/ -lgfortran' % libgfortran,
+            '-DTrilinos_EXTRA_LINK_FLAGS:STRING=-L%s/ -lgfortran' % (
+                libgfortran),
             '-DTrilinos_ENABLE_Fortran=ON'
         ])
 
         # for build-debug only:
-        #options.extend([
-        #   '-DCMAKE_VERBOSE_MAKEFILE:BOOL=TRUE'
-        #])
+        # options.extend([
+        #    '-DCMAKE_VERBOSE_MAKEFILE:BOOL=TRUE'
+        # ])
 
         # suite-sparse related
         if '+suite-sparse' in spec:
             options.extend([
-                '-DTPL_ENABLE_Cholmod:BOOL=OFF', # FIXME: Trilinos seems to be looking for static libs only, patch CMake TPL file?
-                #'-DTPL_ENABLE_Cholmod:BOOL=ON',
-                #'-DCholmod_LIBRARY_DIRS:PATH=%s' % spec['suite-sparse'].prefix.lib,
-                #'-DCholmod_INCLUDE_DIRS:PATH=%s' % spec['suite-sparse'].prefix.include,
+                # FIXME: Trilinos seems to be looking for static libs only,
+                # patch CMake TPL file?
+                '-DTPL_ENABLE_Cholmod:BOOL=OFF',
+                # '-DTPL_ENABLE_Cholmod:BOOL=ON',
+                # '-DCholmod_LIBRARY_DIRS:PATH=%s' % (
+                #    spec['suite-sparse'].prefix.lib,
+                # '-DCholmod_INCLUDE_DIRS:PATH=%s' % (
+                #    spec['suite-sparse'].prefix.include,
                 '-DTPL_ENABLE_UMFPACK:BOOL=ON',
-                '-DUMFPACK_LIBRARY_DIRS:PATH=%s' % spec['suite-sparse'].prefix.lib,
-                '-DUMFPACK_INCLUDE_DIRS:PATH=%s' % spec['suite-sparse'].prefix.include,
-                '-DUMFPACK_LIBRARY_NAMES=umfpack;amd;colamd;cholmod;suitesparseconfig'
+                '-DUMFPACK_LIBRARY_DIRS:PATH=%s' % (
+                    spec['suite-sparse'].prefix.lib),
+                '-DUMFPACK_INCLUDE_DIRS:PATH=%s' % (
+                    spec['suite-sparse'].prefix.include),
+                '-DUMFPACK_LIBRARY_NAMES=umfpack;amd;colamd;cholmod;' +
+                'suitesparseconfig'
             ])
         else:
             options.extend([
@@ -169,9 +197,11 @@ def install(self, spec, prefix):
                 '-DMETIS_LIBRARY_NAMES=metis',
                 '-DTPL_METIS_INCLUDE_DIRS=%s' % spec['metis'].prefix.include,
                 '-DTPL_ENABLE_ParMETIS:BOOL=ON',
-                '-DParMETIS_LIBRARY_DIRS=%s;%s' % (spec['parmetis'].prefix.lib,spec['metis'].prefix.lib),
+                '-DParMETIS_LIBRARY_DIRS=%s;%s' % (
+                    spec['parmetis'].prefix.lib, spec['metis'].prefix.lib),
                 '-DParMETIS_LIBRARY_NAMES=parmetis;metis',
-                '-DTPL_ParMETIS_INCLUDE_DIRS=%s' % spec['parmetis'].prefix.include
+                '-DTPL_ParMETIS_INCLUDE_DIRS=%s' % (
+                    spec['parmetis'].prefix.include)
             ])
         else:
             options.extend([
@@ -184,11 +214,14 @@ def install(self, spec, prefix):
             options.extend([
                 '-DTPL_ENABLE_MUMPS:BOOL=ON',
                 '-DMUMPS_LIBRARY_DIRS=%s' % spec['mumps'].prefix.lib,
-                '-DMUMPS_LIBRARY_NAMES=dmumps;mumps_common;pord', # order is important!
+                # order is important!
+                '-DMUMPS_LIBRARY_NAMES=dmumps;mumps_common;pord',
                 '-DTPL_ENABLE_SCALAPACK:BOOL=ON',
-                '-DSCALAPACK_LIBRARY_NAMES=scalapack' # FIXME: for MKL it's mkl_scalapack_lp64;mkl_blacs_mpich_lp64
+                # FIXME: for MKL it's mkl_scalapack_lp64;mkl_blacs_mpich_lp64
+                '-DSCALAPACK_LIBRARY_NAMES=scalapack'
             ])
-            # see https://github.com/trilinos/Trilinos/blob/master/packages/amesos/README-MUMPS
+            # see
+            # https://github.com/trilinos/Trilinos/blob/master/packages/amesos/README-MUMPS
             cxx_flags.extend([
                 '-DMUMPS_5_0'
             ])
@@ -201,16 +234,20 @@ def install(self, spec, prefix):
         # superlu-dist:
         if '+superlu-dist' in spec:
             # Amesos, conflicting types of double and complex SLU_D
-            # see https://trilinos.org/pipermail/trilinos-users/2015-March/004731.html
-            # and https://trilinos.org/pipermail/trilinos-users/2015-March/004802.html
+            # see
+            # https://trilinos.org/pipermail/trilinos-users/2015-March/004731.html
+            # and
+            # https://trilinos.org/pipermail/trilinos-users/2015-March/004802.html
             options.extend([
                 '-DTeuchos_ENABLE_COMPLEX:BOOL=OFF',
                 '-DKokkosTSQR_ENABLE_Complex:BOOL=OFF'
             ])
             options.extend([
                 '-DTPL_ENABLE_SuperLUDist:BOOL=ON',
-                '-DSuperLUDist_LIBRARY_DIRS=%s' % spec['superlu-dist'].prefix.lib,
-                '-DSuperLUDist_INCLUDE_DIRS=%s' % spec['superlu-dist'].prefix.include
+                '-DSuperLUDist_LIBRARY_DIRS=%s' %
+                spec['superlu-dist'].prefix.lib,
+                '-DSuperLUDist_INCLUDE_DIRS=%s' %
+                spec['superlu-dist'].prefix.include
             ])
             if spec.satisfies('^superlu-dist@4.0:'):
                 options.extend([
@@ -221,7 +258,6 @@ def install(self, spec, prefix):
                 '-DTPL_ENABLE_SuperLUDist:BOOL=OFF',
             ])
 
-
         # python
         if '+python' in spec:
             options.extend([
@@ -248,23 +284,26 @@ def install(self, spec, prefix):
                 '-DTrilinos_ENABLE_FEI=OFF'
             ])
 
-
         with working_dir('spack-build', create=True):
             cmake('..', *options)
             make()
             make('install')
 
-            # When trilinos is built with Python, libpytrilinos is included through
-            # cmake configure files. Namely, Trilinos_LIBRARIES in TrilinosConfig.cmake
-            # contains pytrilinos. This leads to a run-time error:
-            # Symbol not found: _PyBool_Type
-            # and prevents Trilinos to be used in any C++ code, which links executable
-            # against the libraries listed in Trilinos_LIBRARIES.
-            # See https://github.com/Homebrew/homebrew-science/issues/2148#issuecomment-103614509
+            # When Trilinos is built with Python, libpytrilinos is included
+            # through cmake configure files. Namely, Trilinos_LIBRARIES in
+            # TrilinosConfig.cmake contains pytrilinos. This leads to a
+            # run-time error: Symbol not found: _PyBool_Type and prevents
+            # Trilinos from being used in any C++ code, which links the executable
+            # against the libraries listed in Trilinos_LIBRARIES.  See
+            # https://github.com/Homebrew/homebrew-science/issues/2148#issuecomment-103614509
             # A workaround it to remove PyTrilinos from the COMPONENTS_LIST :
             if '+python' in self.spec:
-                filter_file(r'(SET\(COMPONENTS_LIST.*)(PyTrilinos;)(.*)',  (r'\1\3'),  '%s/cmake/Trilinos/TrilinosConfig.cmake' % prefix.lib)
+                filter_file(r'(SET\(COMPONENTS_LIST.*)(PyTrilinos;)(.*)',
+                            (r'\1\3'),
+                            '%s/cmake/Trilinos/TrilinosConfig.cmake' %
+                            prefix.lib)
 
-            # The shared libraries are not installed correctly on Darwin; correct this
+            # The shared libraries are not installed correctly on Darwin;
+            # correct this
             if (sys.platform == 'darwin') and ('+shared' in spec):
                 fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/xorg-util-macros/package.py b/var/spack/repos/builtin/packages/xorg-util-macros/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..963d93442f941dfe8d104a0cda7f5facfd5c409f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xorg-util-macros/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class XorgUtilMacros(Package):
+    """The util-macros package contains the m4 macros used by all of the Xorg packages."""
+
+    homepage = "http://www.example.com"
+    url      = "http://ftp.x.org/pub/individual/util/util-macros-1.19.0.tar.bz2"
+
+    version('1.19.0', '1cf984125e75f8204938d998a8b6c1e1')
+
+    def install(self, spec, prefix):
+        configure("--prefix=%s" % prefix)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/xproto/package.py b/var/spack/repos/builtin/packages/xproto/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..7be6defb83bbf9dd2a3334a04324561050968242
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xproto/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Xproto(Package):
+    """The Xorg protocol headers provide the header files required to
+       build the system, and to allow other applications to build against
+       the installed X Window system."""
+    homepage = "http://www.x.org/"
+    url      = "https://www.x.org/archive//individual/proto/xproto-7.0.29.tar.gz"
+
+    version('7.0.29', '16a78dd2c5ad73011105c96235f6a0af')
+
+    depends_on("xorg-util-macros")
+
+    def install(self, spec, prefix):
+        configure('--prefix=%s' % prefix)
+        make()
+        make("install")