diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000000000000000000000000000000000000..a1271a94fca4052868e114ba82b9a8b55555e2ad
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,34 @@
+# -*- conf -*-
+# .coveragerc to control coverage.py
+[run]
+branch = True
+source = lib
+omit =
+     lib/spack/spack/test/*
+     lib/spack/env/*
+     lib/spack/docs/*
+     lib/spack/external/*
+
+[report]
+# Regexes for lines to exclude from consideration
+exclude_lines =
+    # Have to re-enable the standard pragma
+    pragma: no cover
+
+    # Don't complain about missing debug-only code:
+    def __repr__
+    if self\.debug
+
+    # Don't complain if tests don't hit defensive assertion code:
+    raise AssertionError
+    raise NotImplementedError
+
+    # Don't complain if non-runnable code isn't run:
+    if 0:
+    if False:
+    if __name__ == .__main__.:
+
+ignore_errors = True
+
+[html]
+directory = htmlcov
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000000000000000000000000000000000000..a1e2fcc1f8abc35423366b1b9709834a1809097e
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,20 @@
+# -*- conf -*-
+# flake8 settings for Spack.
+#
+# Below we describe which flake8 checks Spack ignores and what the
+# rationale is.
+#
+# Let people line things up nicely:
+# - E221: multiple spaces before operator
+# - E241: multiple spaces after ','
+#
+# Spack allows wildcard imports:
+# - F403: disable wildcard import
+#
+# These are required to get the package.py files to test clean.
+# - F821: undefined name (needed for cmake, configure, etc.)
+# - F999: name may be undefined, or defined from star imports.
+#
+[flake8]
+ignore = E221,E241,F403,F821,F999
+max-line-length = 79
diff --git a/.gitignore b/.gitignore
index 4b97de5d507070780e7773cd8672c4edf792c168..643e5d9b03cbdc8db56a0d8745ba8036e5d0e996 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,3 +9,5 @@
 /share/spack/dotkit
 /share/spack/modules
 /TAGS
+/htmlcov
+.coverage
diff --git a/.travis.yml b/.travis.yml
index 1bed6b087407bef0c3a5e6810852b96156fd6445..904143a00f7d447b7a2b0b53f0ae015fa9f92292 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,21 +6,35 @@ python:
 # Use new Travis infrastructure (Docker can't sudo yet)
 sudo: false
 
-# No need to install any deps.
-install: true
+# Install coveralls for code coverage reporting and flake8 for style checks
+install:
+  - "pip install coveralls"
+  - "pip install flake8"
 
 before_install:
   # Need this for the git tests to succeed.
   - git config --global user.email "spack@example.com"
   - git config --global user.name "Test User"
 
+  # Need this to be able to compute the list of changed files
+  - git fetch origin develop:develop
+
 script:
+  # Regular spack setup and tests
   - . share/spack/setup-env.sh
   - spack compilers
   - spack config get compilers
-  - spack test
   - spack install -v libdwarf
 
+  # Run unit tests with code coverage
+  - coverage run bin/spack test
+
+  # Run flake8 code style checks.
+  - share/spack/qa/run-flake8
+
+after_success:
+  - coveralls
+
 notifications:
   email:
     recipients:
diff --git a/README.md b/README.md
index 1977a4fee9641954eed2cb30d57b9ea33cbf53dd..fe00e2af279e6176a131bcec58f76ef7916d2ed3 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,8 @@
 ![image](share/spack/logo/spack-logo-text-64.png "Spack")
 ============
 
-[![Build Status](https://travis-ci.org/LLNL/spack.png?branch=develop)](https://travis-ci.org/LLNL/spack)
+[![Build Status](https://travis-ci.org/LLNL/spack.svg?branch=develop)](https://travis-ci.org/LLNL/spack)
+[![Coverage Status](https://coveralls.io/repos/github/LLNL/spack/badge.svg?branch=develop)](https://coveralls.io/github/LLNL/spack?branch=develop)
 
 Spack is a package management tool designed to support multiple
 versions and configurations of software on a wide variety of platforms
@@ -62,6 +63,11 @@ a [pull request](https://help.github.com/articles/using-pull-requests/).
 When you send your request, make ``develop`` the destination branch on the
 [Spack repository](https://github.com/LLNL/spack).
 
+Your contribution will need to pass all the tests run by the `spack test`
+command, as well as the style checks run by `share/spack/qa/run-flake8`.
+You should run both of these before submitting your pull request, to
+ensure that the online checks succeed.
+
 Spack is using a rough approximation of the [Git
 Flow](http://nvie.com/posts/a-successful-git-branching-model/)
 branching model.  The ``develop`` branch contains the latest
diff --git a/bin/spack b/bin/spack
index 31165bba9d1ec7c9c8dd42e74e203f92761ee96f..f51cb8a4ecced648a7f5c446f2bb53ba3ad46b85 100755
--- a/bin/spack
+++ b/bin/spack
@@ -152,7 +152,7 @@ def main():
     command = spack.cmd.get_command(args.command)
     try:
         return_val = command(parser, args)
-    except SpackError, e:
+    except SpackError as e:
         e.die()
     except KeyboardInterrupt:
         sys.stderr.write('\n')
diff --git a/etc/spack/modules.yaml b/etc/spack/modules.yaml
index aa2a2c3fe2990d976c1c3ca6c682a149b4b6a4bf..99be5e7b6d88b820c9050145e1df5b271ef4c6eb 100644
--- a/etc/spack/modules.yaml
+++ b/etc/spack/modules.yaml
@@ -5,4 +5,25 @@
 # although users can override these settings in their ~/.spack/modules.yaml.
 # -------------------------------------------------------------------------
 modules:
-  enable: ['tcl', 'dotkit']
+  enable:
+    - tcl
+    - dotkit
+  prefix_inspections:
+    bin:
+      - PATH
+    man:
+      - MANPATH
+    lib:
+      - LIBRARY_PATH
+      - LD_LIBRARY_PATH
+    lib64:
+      - LIBRARY_PATH
+      - LD_LIBRARY_PATH
+    include:
+      - CPATH
+    lib/pkgconfig:
+      - PKG_CONFIG_PATH
+    lib64/pkgconfig:
+      - PKG_CONFIG_PATH
+    '':
+      - CMAKE_PREFIX_PATH
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 29791d98c4a63e4a5b08ef3286b03da453e295e4..15db2f7a16cef6af54c7e3fd5ef359dd3dcea1b5 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -788,7 +788,7 @@ versions are now filtered out.
 
 .. _shell-support:
 
-Environment modules
+Integration with module systems
 -------------------------------
 
 .. note::
@@ -798,42 +798,50 @@ Environment modules
    interface and/or generated module names may change in future
    versions.
 
-Spack provides some limited integration with environment module
-systems to make it easier to use the packages it provides.
+Spack provides some integration with
+`Environment Modules <http://modules.sourceforge.net/>`_
+and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ to make
+it easier to use the packages it installs.
+
 
 
 Installing Environment Modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 In order to use Spack's generated environment modules, you must have
 installed the *Environment Modules* package.  On many Linux
-distributions, this can be installed from the vendor's repository.
-For example: ```yum install environment-modules``
-(Fedora/RHEL/CentOS).  If your Linux distribution does not have
-Environment Modules, you can get it with Spack:
+distributions, this can be installed from the vendor's repository:
 
-1. Install with::
+.. code-block:: sh
+
+    yum install environment-modules # (Fedora/RHEL/CentOS)
+    apt-get install environment-modules # (Ubuntu/Debian)
+
+If your Linux distribution does not have
+Environment Modules, you can get it with Spack:
 
 .. code-block:: sh
 
     spack install environment-modules
 
-2. Activate with::
 
-Add the following two lines to your ``.bashrc`` profile (or similar):
+In this case, to activate it automatically, you need to add the following
+two lines to your ``.bashrc`` profile (or similar):
 
 .. code-block:: sh
 
    MODULES_HOME=`spack location -i environment-modules`
    source ${MODULES_HOME}/Modules/init/bash
 
-In case you use a Unix shell other than bash, substitute ``bash`` by
-the appropriate file in ``${MODULES_HOME}/Modules/init/``.
+If you use a Unix shell other than ``bash``, modify the commands above
+accordingly and source the appropriate file in
+``${MODULES_HOME}/Modules/init/``.
 
 
-Spack and Environment Modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+.. TODO: Add a similar section on how to install dotkit?
 
+Spack and module systems
+~~~~~~~~~~~~~~~~~~~~~~~~
 You can enable shell support by sourcing some files in the
 ``/share/spack`` directory.
 
@@ -841,7 +849,7 @@ For ``bash`` or ``ksh``, run:
 
 .. code-block:: sh
 
-   . $SPACK_ROOT/share/spack/setup-env.sh
+   . ${SPACK_ROOT}/share/spack/setup-env.sh
 
 For ``csh`` and ``tcsh`` run:
 
@@ -853,17 +861,19 @@ For ``csh`` and ``tcsh`` run:
 You can put the above code in your ``.bashrc`` or ``.cshrc``, and
 Spack's shell support will be available on the command line.
 
-When you install a package with Spack, it automatically generates an
-environment module that lets you add the package to your environment.
+When you install a package with Spack, it automatically generates a module file
+that lets you add the package to your environment.
 
-Currently, Spack supports the generation of `TCL Modules
+Currently, Spack supports the generation of `Environment Modules
 <http://wiki.tcl.tk/12999>`_ and `Dotkit
 <https://computing.llnl.gov/?set=jobs&page=dotkit>`_.  Generated
 module files for each of these systems can be found in these
 directories:
 
-  * ``$SPACK_ROOT/share/spack/modules``
-  * ``$SPACK_ROOT/share/spack/dotkit``
+.. code-block:: sh
+
+  ${SPACK_ROOT}/share/spack/modules
+  ${SPACK_ROOT}/share/spack/dotkit
 
 The directories are automatically added to your ``MODULEPATH`` and
 ``DK_NODE`` environment variables when you enable Spack's `shell
@@ -919,8 +929,7 @@ of installed packages.
 
 The names here should look familiar, they're the same ones from
 ``spack find``.  You *can* use the names here directly.  For example,
-you could type either of these commands to load the callpath module
-(assuming dotkit and modules are installed):
+you could type either of these commands to load the callpath module:
 
 .. code-block:: sh
 
@@ -935,7 +944,7 @@ easy to type.  Luckily, Spack has its own interface for using modules
 and dotkits.  You can use the same spec syntax you're used to:
 
   =========================  ==========================
-  Modules                    Dotkit
+  Environment Modules        Dotkit
   =========================  ==========================
   ``spack load <spec>``      ``spack use <spec>``
   ``spack unload <spec>``    ``spack unuse <spec>``
@@ -1002,15 +1011,216 @@ used ``gcc``.  You could therefore just type:
 
 To identify just the one built with the Intel compiler.
 
+Module files generation and customization
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Environment Modules and Dotkit files are generated when packages are installed,
+and are placed in the following directories under the Spack root:
+
+.. code-block:: sh
+
+  ${SPACK_ROOT}/share/spack/modules
+  ${SPACK_ROOT}/share/spack/dotkit
+
+The content that gets written in each module file can be customized in two ways:
+
+  1. overriding part of the ``spack.Package`` API within a ``package.py``
+  2. writing dedicated configuration files
+
+Override ``Package`` API
+^^^^^^^^^^^^^^^^^^^^^^^^
+There are currently two methods in ``spack.Package`` that may affect the content
+of module files:
+
+.. code-block:: python
+
+  def setup_environment(self, spack_env, run_env):
+      """Set up the compile and runtime environments for a package."""
+      pass
+
+.. code-block:: python
 
-Regenerating Module files
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+  def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+      """Set up the environment of packages that depend on this one"""
+      pass
 
-Module and dotkit files are generated when packages are installed, and
-are placed in the following directories under the Spack root:
+As briefly stated in the docstrings, the first method lets you customize the
+module file content of the package you are currently writing, while the
+second lets you modify the module files of its dependents. In both cases you
+need to fill ``run_env`` with the desired list of environment modifications.
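+
+For instance, a package could, hypothetically, override ``setup_environment``
+so that its generated module file extends ``PATH`` and sets an extra
+variable (the ``tools`` subdirectory and ``FOO_HOME`` below are made-up names
+used only for illustration):
+
+.. code-block:: python
+
+  def setup_environment(self, spack_env, run_env):
+      # Prepend a hypothetical extra directory of executables to PATH
+      # in the generated module file ...
+      run_env.prepend_path('PATH', join_path(self.prefix, 'tools'))
+      # ... and set a variable pointing at the installation prefix.
+      run_env.set('FOO_HOME', self.prefix)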
 
-  * ``$SPACK_ROOT/share/spack/modules``
-  * ``$SPACK_ROOT/share/spack/dotkit``
+Example: ``builtin/packages/python/package.py``
+""""""""""""""""""""""""""""""""""""""""""""""""
+
+The ``python`` package that comes with the ``builtin`` Spack repository
+overrides ``setup_dependent_environment`` in the following way:
+
+.. code-block:: python
+
+  def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+        # ...
+        if extension_spec.package.extends(self.spec):
+            run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir))
+
+to insert the appropriate ``PYTHONPATH`` modifications in the module
+files of python packages.
+
+Configuration files
+^^^^^^^^^^^^^^^^^^^
+
+Another way of modifying the content of module files is writing a
+``modules.yaml`` configuration file. Following usual Spack conventions, this
+file can be placed either at *site* or *user* scope.
+
+The default site configuration reads:
+
+.. literalinclude:: ../../../etc/spack/modules.yaml
+   :language: yaml
+
+It inspects the installation prefix for the existence of a few subdirectories
+and, for each one that exists, prepends the corresponding path to the listed
+environment variables.
+
+For each module system that can be enabled a finer configuration is possible:
+
+.. code-block:: yaml
+
+ modules:
+   tcl:
+     # contains environment modules specific customizations
+   dotkit:
+     # contains dotkit specific customizations
+
+The structure under the ``tcl`` and ``dotkit`` keys is almost identical, and
+is illustrated by the examples below.
+
+Select module files by spec constraints
+"""""""""""""""""""""""""""""""""""""""
+Using spec syntax it's possible to have different customizations for different
+groups of module files.
+
+Given the following configuration:
+
+.. code-block:: yaml
+
+ modules:
+   tcl:
+     all: # Default addition for every package
+       environment:
+         set:
+           BAR: 'bar'
+     ^openmpi:: # A double ':' overrides previous rules
+       environment:
+         set:
+           BAR: 'baz'
+     zlib:
+       environment:
+         prepend_path:
+           LD_LIBRARY_PATH: 'foo'
+     zlib%gcc@4.8:
+       environment:
+         unset:
+         - FOOBAR
+
+the following will happen:
+
+ - every module file will set ``BAR=bar``
+ - unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``
+ - any spec that satisfies ``zlib`` will additionally prepend ``foo`` to ``LD_LIBRARY_PATH``
+ - any spec that satisfies ``zlib%gcc@4.8`` will additionally unset ``FOOBAR``
+
+.. note::
+  Order does matter
+    The modifications associated with the ``all`` keyword are always evaluated
+    first, no matter where they appear in the configuration file. All the other
+    spec constraints are instead evaluated top to bottom.
+
+Filter modifications out of module files
+""""""""""""""""""""""""""""""""""""""""
+
+Modifications to certain environment variables in module files are generated by
+default. Suppose you would like to avoid having ``CPATH`` and ``LIBRARY_PATH``
+modified by your dotkit modules. Then:
+
+.. code-block:: yaml
+
+  modules:
+    dotkit:
+      all:
+        filter:
+          environment_blacklist: ['CPATH', 'LIBRARY_PATH']  # Exclude changes to any of these variables
+
+will generate dotkit module files without modifications to either ``CPATH``
+or ``LIBRARY_PATH``, while the corresponding environment module files will
+still contain those modifications.
+
+Autoload dependencies
+"""""""""""""""""""""
+
+The following lines in ``modules.yaml``:
+
+.. code-block:: yaml
+
+  modules:
+    tcl:
+      all:
+        autoload: 'direct'
+
+will produce environment module files that will automatically load their direct
+dependencies.
+
+.. note::
+  Allowed values for ``autoload`` statements
+    Allowed values for ``autoload`` statements are either ``none``, ``direct``
+    or ``all``. In ``tcl`` configuration it is possible to use the option
+    ``prerequisites`` that accepts the same values and will add ``prereq``
+    statements instead of automatically loading other modules.
+
+Blacklist or whitelist the generation of specific module files
+""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+
+Sometimes it is desirable not to generate module files; a common use case is
+not exposing to users software that was built with the system compiler.
+
+A configuration file like:
+
+.. code-block:: yaml
+
+  modules:
+    tcl:
+      whitelist: ['gcc', 'llvm']  # Whitelist will have precedence over blacklist
+      blacklist: ['%gcc@4.4.7']  # Assuming gcc@4.4.7 is the system compiler
+
+will skip module file generation for anything that satisfies ``%gcc@4.4.7``,
+with the exception of specs that satisfy ``gcc`` or ``llvm``.
+
+Customize the naming scheme and insert conflicts
+""""""""""""""""""""""""""""""""""""""""""""""""
+
+A configuration file like:
+
+.. code-block:: yaml
+
+  modules:
+    tcl:
+      naming_scheme: '{name}/{version}-{compiler.name}-{compiler.version}'
+      all:
+        conflict: ['{name}', 'intel/14.0.1']
+
+will create module files that conflict with ``intel/14.0.1`` and with the
+base directory of the same module, effectively preventing two or more
+versions of the same software from being loaded at the same time.
+
+.. note::
+  Tokens available for the naming scheme
+    Currently, only the tokens shown in the example are available to
+    construct the naming scheme.
+
+.. note::
+  The ``conflict`` option is ``tcl``-specific.
+
+Regenerating module files
+^^^^^^^^^^^^^^^^^^^^^^^^^
 
 Sometimes you may need to regenerate the modules files.  For example,
 if newer, fancier module support is added to Spack at some later date,
@@ -1020,7 +1230,7 @@ new features.
 .. _spack-module:
 
 ``spack module refresh``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""""
 
 Running ``spack module refresh`` will remove the
 ``share/spack/modules`` and ``share/spack/dotkit`` directories, then
@@ -1246,6 +1456,51 @@ several variants:
 
        spack deactivate -a python
 
+Filesystem requirements
+--------------------------
+
+Spack currently needs to be run from a filesystem that supports
+``flock`` locking semantics.  Nearly all local filesystems and recent
+versions of NFS support this, but parallel filesystems may be mounted
+without ``flock`` support enabled.  You can determine how your
+filesystems are mounted with ``mount -l``.  The output for a Lustre
+filesystem might look like this:
+
+.. code-block:: sh
+
+   $ mount -l | grep lscratch
+   pilsner-mds1-lnet0@o2ib100:/lsd on /p/lscratchd type lustre (rw,nosuid,noauto,_netdev,lazystatfs,flock)
+   porter-mds1-lnet0@o2ib100:/lse on /p/lscratche type lustre (rw,nosuid,noauto,_netdev,lazystatfs,flock)
+
+Note the ``flock`` option on both Lustre mounts.  If you do not see
+this or a similar option for your filesystem, you may need to ask your
+system administrator to enable ``flock``.
+
+This issue typically manifests with the error below:
+
+.. code-block:: sh
+
+   $ ./spack find
+   Traceback (most recent call last):
+   File "./spack", line 176, in <module>
+     main()
+   File "./spack", line 154, in main
+     return_val = command(parser, args)
+   File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
+     specs = set(spack.installed_db.query(**q_args))
+   File "./spack/lib/spack/spack/database.py", line 551, in query
+     with self.read_transaction():
+   File "./spack/lib/spack/spack/database.py", line 598, in __enter__
+     if self._enter() and self._acquire_fn:
+   File "./spack/lib/spack/spack/database.py", line 608, in _enter
+     return self._db.lock.acquire_read(self._timeout)
+   File "./spack/lib/spack/llnl/util/lock.py", line 103, in acquire_read
+     self._lock(fcntl.LOCK_SH, timeout)   # can raise LockError.
+   File "./spack/lib/spack/llnl/util/lock.py", line 64, in _lock
+     fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
+   IOError: [Errno 38] Function not implemented
+
+A nicer error message for this case is planned for a future Spack version.
 
 Getting Help
 -----------------------
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 34d11308f5e7dc58f4d77f513a4637d04ad02205..1b7941ab2418607a95af923016f59981aeacb1b4 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1803,15 +1803,15 @@ Compile-time library search paths
   * ``-L$dep_prefix/lib``
   * ``-L$dep_prefix/lib64``
 Runtime library search paths (RPATHs)
-  * ``-Wl,-rpath,$dep_prefix/lib``
-  * ``-Wl,-rpath,$dep_prefix/lib64``
+  * ``$rpath_flag$dep_prefix/lib``
+  * ``$rpath_flag$dep_prefix/lib64``
 Include search paths
   * ``-I$dep_prefix/include``
 
 An example of this would be the ``libdwarf`` build, which has one
 dependency: ``libelf``.  Every call to ``cc`` in the ``libdwarf``
 build will have ``-I$LIBELF_PREFIX/include``,
-``-L$LIBELF_PREFIX/lib``, and ``-Wl,-rpath,$LIBELF_PREFIX/lib``
+``-L$LIBELF_PREFIX/lib``, and ``$rpath_flag$LIBELF_PREFIX/lib``
 inserted on the command line.  This is done transparently to the
 project's build system, which will just think it's using a system
 where ``libelf`` is readily available.  Because of this, you **do
@@ -1831,6 +1831,31 @@ successfully find ``libdwarf.h`` and ``libdwarf.so``, without the
 packager having to provide ``--with-libdwarf=/path/to/libdwarf`` on
 the command line.
 
+.. note::
+
+    For most compilers, ``$rpath_flag`` is ``-Wl,-rpath,``. However, NAG
+    passes its flags to GCC instead of passing them directly to the linker.
+    Therefore, its ``$rpath_flag`` is doubly wrapped: ``-Wl,-Wl,,-rpath,``.
+    ``$rpath_flag`` can be overridden on a compiler-specific basis in
+    ``lib/spack/spack/compilers/$compiler.py``.
+
+Compiler flags
+~~~~~~~~~~~~~~
+In rare circumstances, such as compiling and running small unit tests, a
+package developer may need to know which compiler flags enable features like
+``OpenMP``, ``C++11`` or ``C++14``. To that end, the compiler classes in
+Spack implement the properties ``openmp_flag``, ``cxx11_flag`` and
+``cxx14_flag``, which can be accessed in a package via
+``self.compiler.cxx11_flag`` and similar. If a given compiler version does
+not support the requested feature, accessing the property produces an error,
+so package developers can also use these properties to assert that a compiler
+supports a feature. This is handy when a package supports additional variants
+like
+
+.. code-block:: python
+
+   variant('openmp', default=True, description="Enable OpenMP support.")
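+
+A hypothetical ``install`` method could then pass the flag through to the
+build only when the variant is active (the ``configure`` arguments below are
+purely illustrative):
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       config_args = ['--prefix=%s' % prefix]
+       if '+openmp' in spec:
+           # Accessing the property produces an error if the chosen
+           # compiler does not support OpenMP.
+           config_args.append('CFLAGS=%s' % self.compiler.openmp_flag)
+       configure(*config_args)
+       make()
+       make('install')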
+
 Message Parsing Interface (MPI)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 It is common for high performance computing software/packages to use ``MPI``.
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 0359dd8a117983c563d6a60157a7450a45670b95..7dfb0eaa0d3ce1728bfb8c8403581193d13d9c01 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -38,15 +38,20 @@
 #      -Wl,-rpath   arguments for dependency /lib directories.
 #
 
-# This is the list of environment variables that need to be set before
+# This is an array of environment variables that need to be set before
 # the script runs. They are set by routines in spack.build_environment
 # as part of spack.package.Package.do_install().
-parameters="
-SPACK_PREFIX
-SPACK_ENV_PATH
-SPACK_DEBUG_LOG_DIR
-SPACK_COMPILER_SPEC
-SPACK_SHORT_SPEC"
+parameters=(
+    SPACK_PREFIX
+    SPACK_ENV_PATH
+    SPACK_DEBUG_LOG_DIR
+    SPACK_COMPILER_SPEC
+    SPACK_CC_RPATH_ARG
+    SPACK_CXX_RPATH_ARG
+    SPACK_F77_RPATH_ARG
+    SPACK_FC_RPATH_ARG
+    SPACK_SHORT_SPEC
+)
 
 # The compiler input variables are checked for sanity later:
 #   SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
@@ -67,7 +72,7 @@ function die {
     exit 1
 }
 
-for param in $parameters; do
+for param in ${parameters[@]}; do
     if [[ -z ${!param} ]]; then
         die "Spack compiler must be run from Spack! Input '$param' is missing."
     fi
@@ -88,6 +93,7 @@ done
 #    ccld    compile & link
 
 command=$(basename "$0")
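+# comp selects which SPACK_${comp}_RPATH_ARG variable is used below;
+# it defaults to CC and is overridden per language in the case statement.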
+comp="CC"
 case "$command" in
     cpp)
         mode=cpp
@@ -95,21 +101,25 @@ case "$command" in
     cc|c89|c99|gcc|clang|icc|pgcc|xlc)
         command="$SPACK_CC"
         language="C"
+        comp="CC"
         lang_flags=C
         ;;
     c++|CC|g++|clang++|icpc|pgc++|xlc++)
         command="$SPACK_CXX"
         language="C++"
+        comp="CXX"
         lang_flags=CXX
         ;;
     f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
         command="$SPACK_FC"
         language="Fortran 90"
+        comp="FC"
         lang_flags=F
         ;;
     f77|gfortran|ifort|pgfortran|xlf|nagfor)
         command="$SPACK_F77"
         language="Fortran 77"
+        comp="F77"
         lang_flags=F
         ;;
     ld)
@@ -149,6 +159,9 @@ if [[ -z $mode ]]; then
     done
 fi
 
+# Set up rpath variable according to language.
+eval rpath=\$SPACK_${comp}_RPATH_ARG
+
 # Dump the version and exit if we're in testing mode.
 if [[ $SPACK_TEST_COMMAND == dump-mode ]]; then
     echo "$mode"
@@ -231,7 +244,7 @@ for dep in "${deps[@]}"; do
     # Prepend lib and RPATH directories
     if [[ -d $dep/lib ]]; then
         if [[ $mode == ccld ]]; then
-            $add_rpaths && args=("-Wl,-rpath,$dep/lib" "${args[@]}")
+            $add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
             args=("-L$dep/lib" "${args[@]}")
         elif [[ $mode == ld ]]; then
             $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
@@ -242,7 +255,7 @@ for dep in "${deps[@]}"; do
     # Prepend lib64 and RPATH directories
     if [[ -d $dep/lib64 ]]; then
         if [[ $mode == ccld ]]; then
-            $add_rpaths && args=("-Wl,-rpath,$dep/lib64" "${args[@]}")
+            $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
             args=("-L$dep/lib64" "${args[@]}")
         elif [[ $mode == ld ]]; then
             $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
@@ -253,9 +266,11 @@ done
 
 # Include all -L's and prefix/whatever dirs in rpath
 if [[ $mode == ccld ]]; then
-    $add_rpaths && args=("-Wl,-rpath,$SPACK_PREFIX/lib" "-Wl,-rpath,$SPACK_PREFIX/lib64" "${args[@]}")
+    $add_rpaths && args=("$rpath$SPACK_PREFIX/lib64" "${args[@]}")
+    $add_rpaths && args=("$rpath$SPACK_PREFIX/lib"   "${args[@]}")
 elif [[ $mode == ld ]]; then
-    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "-rpath" "$SPACK_PREFIX/lib64" "${args[@]}")
+    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib64" "${args[@]}")
+    $add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib"   "${args[@]}")
 fi
 
 # Add SPACK_LDLIBS to args
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index 2701fab90c9e17c584b02447157d413cacbfe54e..62c25c80037c05511fc91f3d5be5305fd9796e0d 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -34,14 +34,14 @@
 
 class InvalidSysTypeError(serr.SpackError):
     def __init__(self, sys_type):
-        super(InvalidSysTypeError, self).__init__(
-            "Invalid sys_type value for Spack: " + sys_type)
+        super(InvalidSysTypeError,
+              self).__init__("Invalid sys_type value for Spack: " + sys_type)
 
 
 class NoSysTypeError(serr.SpackError):
     def __init__(self):
-        super(NoSysTypeError, self).__init__(
-            "Could not determine sys_type for this machine.")
+        super(NoSysTypeError,
+              self).__init__("Could not determine sys_type for this machine.")
 
 
 def get_sys_type_from_spack_globals():
@@ -69,15 +69,15 @@ def get_sys_type_from_platform():
 @memoized
 def sys_type():
     """Returns a SysType for the current machine."""
-    methods = [get_sys_type_from_spack_globals,
-               get_sys_type_from_environment,
+    methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
                get_sys_type_from_platform]
 
     # search for a method that doesn't return None
     sys_type = None
     for method in methods:
         sys_type = method()
-        if sys_type: break
+        if sys_type:
+            break
 
     # Couldn't determine the sys_type for this machine.
     if sys_type is None:
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 2c9feca8aeaf8de43e7cc9607aa37d7754f4b485..1adaccdcff1538916c01cd4ebf137817849ef904 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -38,9 +38,6 @@
 
 import spack
 from spack.environment import EnvironmentModifications, validate
-import spack.compilers as compilers
-import spack.compiler as Compiler
-from spack.util.executable import Executable, which
 from spack.util.environment import *
 from spack.util.executable import Executable, which
 
@@ -118,7 +115,14 @@ def set_compiler_environment_variables(pkg, env):
     if compiler.f77:
         env.set('SPACK_F77', compiler.f77)
     if compiler.fc:
-        env.set('SPACK_FC', compiler.fc)
+        env.set('SPACK_FC',  compiler.fc)
+
+    # Set SPACK compiler rpath flags so that our wrapper knows what to use
+    env.set('SPACK_CC_RPATH_ARG',  compiler.cc_rpath_arg)
+    env.set('SPACK_CXX_RPATH_ARG', compiler.cxx_rpath_arg)
+    env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
+    env.set('SPACK_FC_RPATH_ARG',  compiler.fc_rpath_arg)
+
     # Add every valid compiler flag to the environment, prefixed with "SPACK_"
     for flag in spack.spec.FlagMap.valid_compiler_flags():
         # Concreteness guarantees key safety here
@@ -186,8 +190,8 @@ def set_build_environment_variables(pkg, env):
     # Add any pkgconfig directories to PKG_CONFIG_PATH
     pkg_config_dirs = []
     for p in dep_prefixes:
-        for libdir in ('lib', 'lib64'):
-            pcdir = join_path(p, libdir, 'pkgconfig')
+        for maybe in ('lib', 'lib64', 'share'):
+            pcdir = join_path(p, maybe, 'pkgconfig')
             if os.path.isdir(pcdir):
                 pkg_config_dirs.append(pcdir)
     env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index a8e9e2a7a51703d829b949ad5439b6959a06f1b1..d3f8779d32ef1c29292351e10588fe80f2326d7d 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -22,19 +22,18 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import sys
 import argparse
+import sys
 
 import llnl.util.tty as tty
-from llnl.util.tty.color import colorize
-from llnl.util.tty.colify import colify
-from llnl.util.lang import index_by
-
 import spack.compilers
-import spack.spec
 import spack.config
-from spack.util.environment import get_path
+import spack.spec
+from llnl.util.lang import index_by
+from llnl.util.tty.colify import colify
+from llnl.util.tty.color import colorize
 from spack.spec import CompilerSpec
+from spack.util.environment import get_path
 
 description = "Manage compilers"
 
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index f0cd50b8df6957dc584a5fff23560db8d91e634c..e3a31806ab82b70a5b6441e8451c08cb1f56b183 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -124,10 +124,12 @@ def __call__(self, stage):
         autotools = "configure('--prefix=%s' % prefix)"
         cmake     = "cmake('.', *std_cmake_args)"
         python    = "python('setup.py', 'install', '--prefix=%s' % prefix)"
+        r         = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)"
 
         config_lines = ((r'/configure$',      'autotools', autotools),
                         (r'/CMakeLists.txt$', 'cmake',     cmake),
-                        (r'/setup.py$',       'python',    python))
+                        (r'/setup.py$',       'python',    python),
+                        (r'/NAMESPACE$',      'r',         r))
 
         # Peek inside the tarball.
         tar = which('tar')
@@ -272,6 +274,10 @@ def create(parser, args):
     if guesser.build_system == 'python':
         name = 'py-%s' % name
 
+    # Prepend 'r-' to R package names, by convention.
+    if guesser.build_system == 'r':
+        name = 'r-%s' % name
+
     # Create a directory for the new package.
     pkg_path = repo.filename_for_package_name(name)
     if os.path.exists(pkg_path) and not args.force:
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index a67f5c0c137b86a8d07c5d1478e165ed93df9c1e..cfe59c8d98780c4f7e08f03a6b33d4d147762079 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -32,18 +32,21 @@
 from spack.modules import module_types
 from spack.util.string import *
 
-description ="Manipulate modules and dotkits."
+description = "Manipulate modules and dotkits."
 
 
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
 
-    refresh_parser = sp.add_parser('refresh', help='Regenerate all module files.')
+    sp.add_parser('refresh', help='Regenerate all module files.')
 
     find_parser = sp.add_parser('find', help='Find module files for packages.')
-    find_parser.add_argument(
-        'module_type', help="Type of module to find file for. [" + '|'.join(module_types) + "]")
-    find_parser.add_argument('spec', nargs='+', help='spec to find a module file for.')
+    find_parser.add_argument('module_type',
+                             help="Type of module to find file for. [" +
+                             '|'.join(module_types) + "]")
+    find_parser.add_argument('spec',
+                             nargs='+',
+                             help='spec to find a module file for.')
 
 
 def module_find(mtype, spec_array):
@@ -53,7 +56,8 @@ def module_find(mtype, spec_array):
        should type to use that package's module.
     """
     if mtype not in module_types:
-        tty.die("Invalid module type: '%s'.  Options are %s" % (mtype, comma_or(module_types)))
+        tty.die("Invalid module type: '%s'.  Options are %s" %
+                (mtype, comma_or(module_types)))
 
     specs = spack.cmd.parse_specs(spec_array)
     if len(specs) > 1:
@@ -89,7 +93,6 @@ def module_refresh():
             shutil.rmtree(cls.path, ignore_errors=False)
         mkdirp(cls.path)
         for spec in specs:
-            tty.debug("   Writing file for %s" % spec)
             cls(spec).write()
 
 
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index e9c1dc5779874a07f9252c07b2b46780057a0515..36f49cd4f3e98601cf1800af73efffe3cd32d369 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -91,14 +91,22 @@ class Compiler(object):
     # version suffix for gcc.
     suffixes = [r'-.*']
 
-    # Names of generic arguments used by this compiler
-    arg_rpath   = '-Wl,-rpath,%s'
+    # Default flags used by a compiler to set an rpath
+    @property
+    def cc_rpath_arg(self):
+        return '-Wl,-rpath,'
+
+    @property
+    def cxx_rpath_arg(self):
+        return '-Wl,-rpath,'
 
-    # argument used to get C++11 options
-    cxx11_flag = "-std=c++11"
+    @property
+    def f77_rpath_arg(self):
+        return '-Wl,-rpath,'
 
-    # argument used to get C++14 options
-    cxx14_flag = "-std=c++1y"
+    @property
+    def fc_rpath_arg(self):
+        return '-Wl,-rpath,'
 
 
     def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
@@ -129,6 +137,37 @@ def check(exe):
     def version(self):
         return self.spec.version
 
+    # This property should be overridden in the compiler subclass if
+    # OpenMP is supported by that compiler
+    @property
+    def openmp_flag(self):
+        # If not overridden, assume it is unsupported and report an error
+        tty.die("The compiler you have chosen does not currently support OpenMP.",
+                "If you think it should, please edit the compiler subclass and",
+                "submit a pull request or issue.")
+
+
+    # This property should be overridden in the compiler subclass if
+    # C++11 is supported by that compiler
+    @property
+    def cxx11_flag(self):
+        # If not overridden, assume it is unsupported and report an error
+        tty.die("The compiler you have chosen does not currently support C++11.",
+                "If you think it should, please edit the compiler subclass and",
+                "submit a pull request or issue.")
+
+
+    # This property should be overridden in the compiler subclass if
+    # C++14 is supported by that compiler
+    @property
+    def cxx14_flag(self):
+        # If not overridden, assume it is unsupported and report an error
+        tty.die("The compiler you have chosen does not currently support C++14.",
+                "If you think it should, please edit the compiler subclass and",
+                "submit a pull request or issue.")
+
+
+
     #
     # Compiler classes have methods for querying the version of
     # specific compiler executables.  This is used when discovering compilers.
@@ -213,6 +252,10 @@ def check(key):
                 return None
 
         successful = [key for key in parmap(check, checks) if key is not None]
+        # The 'successful' list is ordered like the input paths.
+        # Reverse it here so that the dict creation (last insert wins)
+        # does not spoil the intended precedence.
+        successful.reverse()
         return dict(((v, p, s), path) for v, p, s, path in successful)
 
     @classmethod
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index e406d86a24525b1deeae0fe5646d4054aa59cf93..8c646905c7b32c86290a07c44fe7e3e73b0bd170 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -26,6 +26,8 @@
 import spack.compiler as cpr
 from spack.compiler import *
 from spack.util.executable import *
+import llnl.util.tty as tty
+from spack.version import ver
 
 class Clang(Compiler):
     # Subclasses use possible names of C compiler
@@ -47,6 +49,29 @@ class Clang(Compiler):
                    'f77' : 'f77',
                    'fc'  : 'f90' }
 
+    @property
+    def is_apple(self):
+        ver_string = str(self.version)
+        return ver_string.endswith('-apple')
+
+    @property
+    def openmp_flag(self):
+        if self.is_apple:
+            tty.die("Clang from Apple does not support Openmp yet.")
+        else:
+            return "-fopenmp"
+
+    @property
+    def cxx11_flag(self):
+        if self.is_apple:
+            # FIXME: figure out from which version Apple's clang supports c++11
+            return "-std=c++11"
+        else:
+            if self.version < ver('3.3'):
+                tty.die("Only Clang 3.3 and above support c++11.")
+            else:
+                return "-std=c++11"
+
     @classmethod
     def default_version(self, comp):
         """The '--version' option works for clang compilers.
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 2e57e4485625852344fa83f987918501559abaa3..91c498ac82edbb760dbbc8167b26dc74673b8587 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -49,6 +49,10 @@ class Gcc(Compiler):
                   'f77' : 'gcc/gfortran',
                   'fc'  : 'gcc/gfortran' }
 
+    @property
+    def openmp_flag(self):
+        return "-fopenmp"
+
     @property
     def cxx11_flag(self):
         if self.version < ver('4.3'):
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 69e97647906bd76b7f6e1133f429d7155b6b50c1..9b1cf07c36ee7174731f8da738f296875e43f172 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -23,6 +23,8 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack.compiler import *
+import llnl.util.tty as tty
+from spack.version import ver
 
 class Intel(Compiler):
     # Subclasses use possible names of C compiler
@@ -43,6 +45,13 @@ class Intel(Compiler):
                    'f77' : 'intel/ifort',
                    'fc'  : 'intel/ifort' }
 
+    @property
+    def openmp_flag(self):
+        if self.version < ver('16.0'):
+            return "-openmp"
+        else:
+            return "-qopenmp"
+
     @property
     def cxx11_flag(self):
         if self.version < ver('11.1'):
@@ -68,5 +77,3 @@ def default_version(cls, comp):
         """
         return get_compiler_version(
             comp, '--version', r'\((?:IFORT|ICC)\) ([^ ]+)')
-
-
diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py
index 527a05a090cf100f649c7dcd433a71fce5027b96..49b77eae6b0990d5c5545b6f5fa086d43c115643 100644
--- a/lib/spack/spack/compilers/nag.py
+++ b/lib/spack/spack/compilers/nag.py
@@ -1,4 +1,5 @@
 from spack.compiler import *
+import llnl.util.tty as tty
 
 class Nag(Compiler):
     # Subclasses use possible names of C compiler
@@ -20,6 +21,27 @@ class Nag(Compiler):
                    'f77' : 'nag/nagfor',
                    'fc'  : 'nag/nagfor' }
 
+    @property
+    def openmp_flag(self):
+        return "-openmp"
+
+    @property
+    def cxx11_flag(self):
+        # NAG does not have a C++ compiler
+        # However, it can be mixed with a compiler that does support it
+        return "-std=c++11"
+
+    # Unlike other compilers, the NAG compiler passes options to GCC, which
+    # then passes them to the linker. Therefore, we need to doubly wrap the
+    # options with '-Wl,-Wl,,'
+    @property
+    def f77_rpath_arg(self):
+        return '-Wl,-Wl,,-rpath,'
+
+    @property
+    def fc_rpath_arg(self):
+        return '-Wl,-Wl,,-rpath,'
+
     @classmethod
     def default_version(self, comp):
         """The '-V' option works for nag compilers.
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index c6a1078bd9ffecda87b8a1f31a34f66857321f5e..94c6b8365cda380d46e460bc6253a99318dae280 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -23,6 +23,7 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack.compiler import *
+import llnl.util.tty as tty
 
 class Pgi(Compiler):
     # Subclasses use possible names of C compiler
@@ -43,6 +44,15 @@ class Pgi(Compiler):
                    'f77' : 'pgi/pgfortran',
                    'fc'  : 'pgi/pgfortran' }
 
+    @property
+    def openmp_flag(self):
+        return "-mp"
+
+    @property
+    def cxx11_flag(self):
+        return "-std=c++11"
+
+
     @classmethod
     def default_version(cls, comp):
         """The '-V' option works for all the PGI compilers.
@@ -54,4 +64,3 @@ def default_version(cls, comp):
         """
         return get_compiler_version(
             comp, '-V', r'pg[^ ]* ([^ ]+) \d\d\d?-bit target')
-
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index c1d55109a3756b523ede7aa77afd0e092ba074a5..61a2e730dce29bb54c52595592eec32abfa7f933 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -24,6 +24,8 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 from spack.compiler import *
+import llnl.util.tty as tty
+from spack.version import ver
 
 class Xl(Compiler):
     # Subclasses use possible names of C compiler
@@ -44,6 +46,10 @@ class Xl(Compiler):
                    'f77' : 'xl/xlf',
                    'fc'  : 'xl/xlf90' }
 
+    @property
+    def openmp_flag(self):
+        return "-qsmp=omp"
+
     @property
     def cxx11_flag(self):
         if self.version < ver('13.1'):
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 14e5aaf4fb624e9532e56b2f2a2f60669b8b3152..2e2787c37cfa29031b4dc4526dad42f019fce979 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -1,3 +1,4 @@
+# flake8: noqa
 ##############################################################################
 # Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
 # Produced at the Lawrence Livermore National Laboratory.
@@ -117,22 +118,20 @@
 the site configuration will be ignored.
 
 """
+import copy
 import os
 import re
 import sys
-import copy
-import jsonschema
-from jsonschema import Draft4Validator, validators
-import yaml
-from yaml.error import MarkedYAMLError
-from ordereddict_backport import OrderedDict
 
+import jsonschema
 import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp
-import copy
-
 import spack
+import yaml
+from jsonschema import Draft4Validator, validators
+from llnl.util.filesystem import mkdirp
+from ordereddict_backport import OrderedDict
 from spack.error import SpackError
+from yaml.error import MarkedYAMLError
 
 # Hacked yaml for configuration files preserves line numbers.
 import spack.util.spack_yaml as syaml
@@ -146,7 +145,7 @@
         'type': 'object',
         'additionalProperties': False,
         'patternProperties': {
-            'compilers:?': { # optional colon for overriding site config.
+            'compilers:?': {  # optional colon for overriding site config.
                 'type': 'object',
                 'default': {},
                 'additionalProperties': False,
@@ -195,6 +194,7 @@
                 'default': [],
                 'items': {
                     'type': 'string'},},},},
+
     'packages': {
         '$schema': 'http://json-schema.org/schema#',
         'title': 'Spack package configuration file schema',
@@ -238,24 +238,120 @@
                                 'default' : {},
                             }
                         },},},},},},
+
     'modules': {
         '$schema': 'http://json-schema.org/schema#',
         'title': 'Spack module file configuration file schema',
         'type': 'object',
         'additionalProperties': False,
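+        # Reusable schema fragments, referenced below via '$ref'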
+        'definitions': {
+            'array_of_strings': {
+                'type': 'array',
+                'default': [],
+                'items': {
+                    'type': 'string'
+                }
+            },
+            'dictionary_of_strings': {
+                'type': 'object',
+                'patternProperties': {
+                    r'\w[\w-]*': {  # key
+                        'type': 'string'
+                    }
+                }
+            },
+            'dependency_selection': {
+                'type': 'string',
+                'enum': ['none', 'direct', 'all']
+            },
+            'module_file_configuration': {
+                'type': 'object',
+                'default': {},
+                'additionalProperties': False,
+                'properties': {
+                    'filter': {
+                        'type': 'object',
+                        'default': {},
+                        'additionalProperties': False,
+                        'properties': {
+                            'environment_blacklist': {
+                                'type': 'array',
+                                'default': [],
+                                'items': {
+                                    'type': 'string'
+                                }
+                            }
+                        }
+                    },
+                    'autoload': {'$ref': '#/definitions/dependency_selection'},
+                    'prerequisites': {'$ref': '#/definitions/dependency_selection'},
+                    'conflict': {'$ref': '#/definitions/array_of_strings'},
+                    'environment': {
+                        'type': 'object',
+                        'default': {},
+                        'additionalProperties': False,
+                        'properties': {
+                            'set': {'$ref': '#/definitions/dictionary_of_strings'},
+                            'unset': {'$ref': '#/definitions/array_of_strings'},
+                            'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'},
+                            'append_path': {'$ref': '#/definitions/dictionary_of_strings'}
+                        }
+                    }
+                }
+            },
+            'module_type_configuration': {
+                'type': 'object',
+                'default': {},
+                'anyOf': [
+                    {
+                        'properties': {
+                            'whitelist': {'$ref': '#/definitions/array_of_strings'},
+                            'blacklist': {'$ref': '#/definitions/array_of_strings'},
+                            'naming_scheme': {
+                                'type': 'string'  # Can we be more specific here?
+                            }
+                        }
+                    },
+                    {
+                        'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}}
+                    }
+                ]
+            }
+        },
         'patternProperties': {
             r'modules:?': {
                 'type': 'object',
                 'default': {},
                 'additionalProperties': False,
                 'properties': {
+                    'prefix_inspections': {
+                        'type': 'object',
+                        'patternProperties': {
+                            r'\w[\w-]*': {  # path to be inspected for existence (relative to prefix)
+                                '$ref': '#/definitions/array_of_strings'
+                            }
+                        }
+                    },
                     'enable': {
                         'type': 'array',
                         'default': [],
                         'items': {
-                            'type': 'string'
+                            'type': 'string',
+                            'enum': ['tcl', 'dotkit']
                         }
-                    }
+                    },
+                    'tcl': {
+                        'allOf': [
+                            {'$ref': '#/definitions/module_type_configuration'},  # Base configuration
+                            {}  # Specific tcl extensions
+                        ]
+                    },
+                    'dotkit': {
+                        'allOf': [
+                            {'$ref': '#/definitions/module_type_configuration'},  # Base configuration
+                            {}  # Specific dotkit extensions
+                        ]
+                    },
                 }
             },
         },
@@ -269,10 +365,10 @@
 
 
 def validate_section_name(section):
-    """Raise a ValueError if the section is not a valid section."""
+    """Exit if the section is not a valid section."""
     if section not in section_schemas:
-        raise ValueError("Invalid config section: '%s'.  Options are %s"
-                         % (section, section_schemas))
+        tty.die("Invalid config section: '%s'. Options are: %s"
+                % (section, " ".join(section_schemas.keys())))
 
 
 def extend_with_default(validator_class):
@@ -306,13 +402,14 @@ def set_pp_defaults(validator, properties, instance, schema):
             yield err
 
     return validators.extend(validator_class, {
-        "properties" : set_defaults,
-        "patternProperties" : set_pp_defaults
+        "properties": set_defaults,
+        "patternProperties": set_pp_defaults
     })
 
 
 DefaultSettingValidator = extend_with_default(Draft4Validator)
 
+
 def validate_section(data, schema):
     """Validate data read in from a Spack YAML file.
 
@@ -347,16 +444,14 @@ def get_section_filename(self, section):
         validate_section_name(section)
         return os.path.join(self.path, "%s.yaml" % section)
 
-
     def get_section(self, section):
-        if not section in self.sections:
+        if section not in self.sections:
             path   = self.get_section_filename(section)
             schema = section_schemas[section]
             data   = _read_config_file(path, schema)
             self.sections[section] = data
         return self.sections[section]
 
-
     def write_section(self, section):
         filename = self.get_section_filename(section)
         data = self.get_section(section)
@@ -370,7 +465,6 @@ def write_section(self, section):
         except (yaml.YAMLError, IOError) as e:
             raise ConfigFileError("Error writing to config file: '%s'" % str(e))
 
-
     def clear(self):
         """Empty cached config information."""
         self.sections = {}
@@ -413,7 +507,7 @@ def _read_config_file(filename, schema):
 
     elif not os.path.isfile(filename):
         raise ConfigFileError(
-            "Invlaid configuration. %s exists but is not a file." % filename)
+            "Invalid configuration. %s exists but is not a file." % filename)
 
     elif not os.access(filename, os.R_OK):
         raise ConfigFileError("Config file is not readable: %s" % filename)
@@ -476,7 +570,7 @@ def they_are(t):
     # Source dict is merged into dest.
     elif they_are(dict):
         for sk, sv in source.iteritems():
-            if not sk in dest:
+            if sk not in dest:
                 dest[sk] = copy.copy(sv)
             else:
                 dest[sk] = _merge_yaml(dest[sk], source[sk])
@@ -539,14 +633,19 @@ def update_config(section, update_data, scope=None):
        other yaml-ish structure.
 
     """
+    validate_section_name(section)  # validate section name
+    scope = validate_scope(scope)  # get ConfigScope object from string.
+
     # read in the config to ensure we've got current data
-    get_config(section)
+    configuration = get_config(section)
 
-    validate_section_name(section)       # validate section name
-    scope = validate_scope(scope)   # get ConfigScope object from string.
+    if isinstance(update_data, list):
+        configuration = update_data
+    else:
+        configuration.update(update_data)
 
     # read only the requested section's data.
-    scope.sections[section] = { section : update_data }
+    scope.sections[section] = {section: configuration}
     scope.write_section(section)
 
 
@@ -585,16 +684,20 @@ def spec_externals(spec):
 def is_spec_buildable(spec):
     """Return true if the spec pkgspec is configured as buildable"""
     allpkgs = get_config('packages')
-    name = spec.name
-    if not spec.name in allpkgs:
+    if spec.name not in allpkgs:
         return True
-    if not 'buildable' in allpkgs[spec.name]:
+    if 'buildable' not in allpkgs[spec.name]:
         return True
     return allpkgs[spec.name]['buildable']
 
 
-class ConfigError(SpackError): pass
-class ConfigFileError(ConfigError): pass
+class ConfigError(SpackError):
+    pass
+
+
+class ConfigFileError(ConfigError):
+    pass
+
 
 def get_path(path, data):
     if path:
@@ -602,6 +705,7 @@ def get_path(path, data):
     else:
         return data
 
+
 class ConfigFormatError(ConfigError):
     """Raised when a configuration format does not match its schema."""
     def __init__(self, validation_error, data):
@@ -636,5 +740,6 @@ def __init__(self, validation_error, data):
         message = '%s: %s' % (location, validation_error.message)
         super(ConfigError, self).__init__(message)
 
+
 class ConfigSanityError(ConfigFormatError):
     """Same as ConfigFormatError, raised when config is written by Spack."""
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index 72aafa4e2d6b18d25c8ac0dcc7a2ff210cb3a38c..3fbe2531c182414bf9456ea53346aaa31bc44dd0 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -1,7 +1,7 @@
-import os
-import os.path
 import collections
 import inspect
+import os
+import os.path
 
 
 class NameModifier(object):
@@ -26,7 +26,8 @@ def execute(self):
 
 class UnsetEnv(NameModifier):
     def execute(self):
-        os.environ.pop(self.name, None)  # Avoid throwing if the variable was not set
+        # Avoid throwing if the variable was not set
+        os.environ.pop(self.name, None)
 
 
 class SetPath(NameValueModifier):
@@ -55,7 +56,9 @@ class RemovePath(NameValueModifier):
     def execute(self):
         environment_value = os.environ.get(self.name, '')
         directories = environment_value.split(':') if environment_value else []
-        directories = [os.path.normpath(x) for x in directories if x != os.path.normpath(self.value)]
+        directories = [os.path.normpath(x)
+                       for x in directories
+                       if x != os.path.normpath(self.value)]
         os.environ[self.name] = ':'.join(directories)
 
 
@@ -63,7 +66,8 @@ class EnvironmentModifications(object):
     """
     Keeps track of requests to modify the current environment.
 
-    Each call to a method to modify the environment stores the extra information on the caller in the request:
+    Each call to a method to modify the environment stores the extra
+    information on the caller in the request:
     - 'filename' : filename of the module where the caller is defined
     - 'lineno': line number where the request occurred
     - 'context' : line of code that issued the request that failed
@@ -71,10 +75,10 @@ class EnvironmentModifications(object):
 
     def __init__(self, other=None):
         """
-        Initializes a new instance, copying commands from other if it is not None
+        Initializes a new instance, copying commands from other if not None
 
         Args:
-            other: another instance of EnvironmentModifications from which (optional)
+            other: another instance of EnvironmentModifications (optional)
         """
         self.env_modifications = []
         if other is not None:
@@ -93,7 +97,8 @@ def extend(self, other):
     @staticmethod
     def _check_other(other):
         if not isinstance(other, EnvironmentModifications):
-            raise TypeError('other must be an instance of EnvironmentModifications')
+            raise TypeError(
+                'other must be an instance of EnvironmentModifications')
 
     def _get_outside_caller_attributes(self):
         stack = inspect.stack()
@@ -101,12 +106,10 @@ def _get_outside_caller_attributes(self):
             _, filename, lineno, _, context, index = stack[2]
             context = context[index].strip()
         except Exception:
-            filename, lineno, context = 'unknown file', 'unknown line', 'unknown context'
-        args = {
-            'filename': filename,
-            'lineno': lineno,
-            'context': context
-        }
+            filename = 'unknown file'
+            lineno = 'unknown line'
+            context = 'unknown context'
+        args = {'filename': filename, 'lineno': lineno, 'context': context}
         return args
 
     def set(self, name, value, **kwargs):
@@ -170,7 +173,8 @@ def prepend_path(self, name, path, **kwargs):
 
     def remove_path(self, name, path, **kwargs):
         """
-        Stores in the current object a request to remove a path from a path list
+        Stores in the current object a request to remove a path from a path
+        list
 
         Args:
             name: name of the path list in the environment
@@ -185,7 +189,8 @@ def group_by_name(self):
         Returns a dict of the modifications grouped by variable name
 
         Returns:
-            dict mapping the environment variable name to the modifications to be done on it
+            dict mapping the environment variable name to the modifications to
+            be done on it
         """
         modifications = collections.defaultdict(list)
         for item in self:
@@ -203,7 +208,7 @@ def apply_modifications(self):
         Applies the modifications and clears the list
         """
         modifications = self.group_by_name()
-        # Apply the modifications to the environment variables one variable at a time
+        # Apply modifications one variable at a time
         for name, actions in sorted(modifications.items()):
             for x in actions:
                 x.execute()
@@ -224,13 +229,19 @@ def concatenate_paths(paths):
 
 def set_or_unset_not_first(variable, changes, errstream):
     """
-    Check if we are going to set or unset something after other modifications have already been requested
+    Check if we are going to set or unset something after other modifications
+    have already been requested
     """
-    indexes = [ii for ii, item in enumerate(changes) if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
+    indexes = [ii
+               for ii, item in enumerate(changes)
+               if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
     if indexes:
         good = '\t    \t{context} at {filename}:{lineno}'
         nogood = '\t--->\t{context} at {filename}:{lineno}'
-        errstream('Suspicious requests to set or unset the variable \'{var}\' found'.format(var=variable))
+        message = 'Suspicious requests to set or unset the variable \'{var}\' found'  # NOQA: ignore=E501
+        errstream(message.format(var=variable))
         for ii, item in enumerate(changes):
             print_format = nogood if ii in indexes else good
             errstream(print_format.format(**item.args))
@@ -238,8 +249,8 @@ def set_or_unset_not_first(variable, changes, errstream):
 
 def validate(env, errstream):
     """
-    Validates the environment modifications to check for the presence of suspicious patterns. Prompts a warning for
-    everything that was found
+    Validates the environment modifications to check for the presence of
+    suspicious patterns. Issues a warning for everything that was found
 
     Current checks:
     - set or unset variables after other changes on the same variable
@@ -250,3 +261,20 @@ def validate(env, errstream):
     modifications = env.group_by_name()
     for variable, list_of_changes in sorted(modifications.items()):
         set_or_unset_not_first(variable, list_of_changes, errstream)
+
+
+def filter_environment_blacklist(env, variables):
+    """
+    Generator that filters out any change to environment variables present in
+    the input list
+
+    Args:
+        env: list of environment modifications
+        variables: list of variable names to be filtered
+
+    Yields:
+        items in env if they are not in variables
+    """
+    for item in env:
+        if item.name not in variables:
+            yield item
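+
+
+# A minimal usage sketch (variable names are illustrative only): drop every
+# modification touching PATH before rendering a module file.
+#
+#     env = EnvironmentModifications()
+#     env.set('CC', 'gcc')
+#     env.prepend_path('PATH', '/opt/foo/bin')
+#     kept = list(filter_environment_blacklist(env, ['PATH']))
+#     # 'kept' contains only the request to set CC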
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index 61624fbd703e8debe12d6ad294ae961501783eee..53f054094acc1dd76af564168bec00778359a83b 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -23,33 +23,35 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 """
-This module contains code for creating environment modules, which can include dotkits, tcl modules, lmod, and others.
+This module contains code for creating environment modules, which can include
+dotkits, tcl modules, lmod, and others.
 
-The various types of modules are installed by post-install hooks and removed after an uninstall by post-uninstall hooks.
-This class consolidates the logic for creating an abstract description of the information that module systems need.
-Currently that includes a number of directories to be appended to paths in the user's environment:
+The various types of modules are installed by post-install hooks and removed
+after an uninstall by post-uninstall hooks. This class consolidates the logic
+for creating an abstract description of the information that module systems
+need.
 
-  * /bin directories to be appended to PATH
-  * /lib* directories for LD_LIBRARY_PATH
-  * /include directories for CPATH
-  * /man* and /share/man* directories for MANPATH
-  * the package prefix for CMAKE_PREFIX_PATH
+This module also includes logic for coming up with unique names for the module
+files so that they can be found by the various shell-support files in
+$SPACK/share/spack/setup-env.*.
 
-This module also includes logic for coming up with unique names for the module files so that they can be found by the
-various shell-support files in $SPACK/share/spack/setup-env.*.
-
-Each hook in hooks/ implements the logic for writing its specific type of module file.
+Each hook in hooks/ implements the logic for writing its specific type of
+module file.
 """
+import copy
+import datetime
 import os
 import os.path
 import re
-import shutil
+import string
 import textwrap
 
 import llnl.util.tty as tty
 import spack
 import spack.config
 from llnl.util.filesystem import join_path, mkdirp
+from spack.build_environment import parent_class_modules
+from spack.build_environment import set_module_variables_for_package
 from spack.environment import *
 
 __all__ = ['EnvModule', 'Dotkit', 'TclModule']
@@ -61,56 +63,175 @@
 
 
 def print_help():
-    """For use by commands to tell user how to activate shell support."""
-
-    tty.msg("This command requires spack's shell integration.",
-            "",
+    """
+    For use by commands to tell user how to activate shell support.
+    """
+    tty.msg("This command requires spack's shell integration.", "",
             "To initialize spack's shell commands, you must run one of",
             "the commands below.  Choose the right command for your shell.",
-            "",
-            "For bash and zsh:",
-            "    . %s/setup-env.sh" % spack.share_path,
-            "",
-            "For csh and tcsh:",
-            "    setenv SPACK_ROOT %s"    % spack.prefix,
-            "    source %s/setup-env.csh" % spack.share_path,
-            "")
+            "", "For bash and zsh:",
+            "    . %s/setup-env.sh" % spack.share_path, "",
+            "For csh and tcsh:", "    setenv SPACK_ROOT %s" % spack.prefix,
+            "    source %s/setup-env.csh" % spack.share_path, "")
 
 
 def inspect_path(prefix):
     """
-    Inspects the prefix of an installation to search for common layouts. Issues a request to modify the environment
-    accordingly when an item is found.
+    Inspects the prefix of an installation to search for common layouts. Issues
+    a request to modify the environment accordingly when an item is found.
 
     Args:
         prefix: prefix of the installation
 
     Returns:
-        instance of EnvironmentModifications containing the requested modifications
+        instance of EnvironmentModifications containing the requested
+        modifications
     """
     env = EnvironmentModifications()
     # Inspect the prefix to check for the existence of common directories
-    prefix_inspections = {
-        'bin': ('PATH',),
-        'man': ('MANPATH',),
-        'lib': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
-        'lib64': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
-        'include': ('CPATH',)
-    }
-    for attribute, variables in prefix_inspections.items():
-        expected = getattr(prefix, attribute)
+    prefix_inspections = CONFIGURATION.get('prefix_inspections', {})
+    for relative_path, variables in prefix_inspections.items():
+        expected = join_path(prefix, relative_path)
         if os.path.isdir(expected):
             for variable in variables:
                 env.prepend_path(variable, expected)
-    # PKGCONFIG
-    for expected in (join_path(prefix.lib, 'pkgconfig'), join_path(prefix.lib64, 'pkgconfig')):
-        if os.path.isdir(expected):
-            env.prepend_path('PKG_CONFIG_PATH', expected)
-    # CMake related variables
-    env.prepend_path('CMAKE_PREFIX_PATH', prefix)
     return env
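+
+
+# Illustrative 'prefix_inspections' entry consumed above (a sketch; the
+# enclosing modules.yaml layout is assumed):
+#
+#     prefix_inspections:
+#       bin: ['PATH']
+#       man: ['MANPATH']
+#       lib: ['LIBRARY_PATH', 'LD_LIBRARY_PATH']
+#       lib64: ['LIBRARY_PATH', 'LD_LIBRARY_PATH']
+#       include: ['CPATH']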
 
 
+def dependencies(spec, request='all'):
+    """
+    Returns the list of dependent specs for a given spec, according to the
+    given request
+
+    Args:
+        spec: target spec
+        request: either 'none', 'direct' or 'all'
+
+    Returns:
+        empty list if 'none', direct dependency list if 'direct', all
+        dependencies if 'all'
+    """
+    if request not in ('none', 'direct', 'all'):
+        message = "Wrong value for argument 'request': "
+        message += "should be one of ('none', 'direct', 'all')"
+        tty.error(message + " [current value is '%s']" % request)
+        raise ValueError(message)
+
+    if request == 'none':
+        return []
+
+    if request == 'direct':
+        return [xx for _, xx in spec.dependencies.items()]
+
+    # FIXME : during module file creation nodes seem to be visited multiple
+    # FIXME : times even if cover='nodes' is given. This work around permits
+    # FIXME : to get a unique list of spec anyhow. Do we miss a merge
+    # FIXME : step among nodes that refer to the same package?
+    seen = set()
+    seen_add = seen.add
+    nodes = sorted(
+        spec.traverse(order='post', depth=True, cover='nodes', root=False),
+        reverse=True)
+    return [xx for _, xx in nodes if not (xx in seen or seen_add(xx))]
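+
+
+# A minimal usage sketch (the package name is just an example):
+#
+#     spec = spack.spec.Spec('libdwarf')
+#     spec.concretize()
+#     direct_deps = dependencies(spec, 'direct')
+#     all_deps = dependencies(spec, 'all')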
+
+
+def update_dictionary_extending_lists(target, update):
+    for key in update:
+        value = target.get(key, None)
+        if isinstance(value, list):
+            target[key].extend(update[key])
+        elif isinstance(value, dict):
+            update_dictionary_extending_lists(target[key], update[key])
+        else:
+            target[key] = update[key]
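+
+
+# Behaviour sketch (keys and values illustrative): list values are extended
+# rather than overwritten.
+#
+#     d = {'autoload': ['a']}
+#     update_dictionary_extending_lists(d, {'autoload': ['b']})
+#     # d['autoload'] == ['a', 'b']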
+
+
+def parse_config_options(module_generator):
+    """
+    Parses the configuration file and returns the items needed during module
+    file generation
+
+    Args:
+        module_generator: module generator for a given spec
+
+    Returns:
+        actions: dictionary of the actions to be taken when writing the
+        module file (specs to be autoloaded or marked as prerequisite,
+        environment variables whose modification is blacklisted, any
+        module-specific options)
+        env: list of custom environment modifications to be applied in
+        the module file
+    """
+    # Get the configuration for this kind of generator
+    module_configuration = copy.deepcopy(CONFIGURATION.get(
+        module_generator.name, {}))
+
+    #####
+    # Merge all the rules
+    #####
+    module_file_actions = module_configuration.pop('all', {})
+    for spec, conf in module_configuration.items():
+        override = False
+        if spec.endswith(':'):
+            spec = spec.strip(':')
+            override = True
+        if module_generator.spec.satisfies(spec):
+            if override:
+                module_file_actions = {}
+            update_dictionary_extending_lists(module_file_actions, conf)
+
+    #####
+    # Process the common rules
+    #####
+
+    # Automatic loading of dependencies
+    module_file_actions['autoload'] = dependencies(
+        module_generator.spec, module_file_actions.get('autoload', 'none'))
+    # Prerequisites
+    module_file_actions['prerequisites'] = dependencies(
+        module_generator.spec, module_file_actions.get('prerequisites',
+                                                       'none'))
+    # Environment modifications
+    environment_actions = module_file_actions.pop('environment', {})
+    env = EnvironmentModifications()
+
+    def process_arglist(arglist):
+        if method == 'unset':
+            for x in arglist:
+                yield (x, )
+        else:
+            for x in arglist.iteritems():
+                yield x
+
+    for method, arglist in environment_actions.items():
+        for args in process_arglist(arglist):
+            getattr(env, method)(*args)
+
+    return module_file_actions, env
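+
+
+# Sketch of an 'environment' block handled above (the modules.yaml layout and
+# the variable names are assumptions):
+#
+#     environment:
+#       set: {FOO: bar}
+#       unset: [BAZ]
+#       prepend_path: {PATH: /opt/foo/bin}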
+
+
+def filter_blacklisted(specs, module_name):
+    """
+    Given a sequence of specs, filters the ones that are blacklisted in the
+    module configuration file.
+
+    Args:
+        specs: sequence of spec instances
+        module_name: type of module file objects
+
+    Yields:
+        non-blacklisted specs
+    """
+    for x in specs:
+        if module_types[module_name](x).blacklisted:
+            tty.debug('\tFILTER : %s' % x)
+            continue
+        yield x
+
+
 class EnvModule(object):
     name = 'env_module'
     formats = {}
@@ -118,7 +239,8 @@ class EnvModule(object):
     class __metaclass__(type):
         def __init__(cls, name, bases, dict):
             type.__init__(cls, name, bases, dict)
-            if cls.name != 'env_module' and cls.name in CONFIGURATION['enable']:
+            if (cls.name != 'env_module' and
+                    cls.name in CONFIGURATION['enable']):
                 module_types[cls.name] = cls
 
     def __init__(self, spec=None):
@@ -134,8 +256,41 @@ def __init__(self, spec=None):
         # long description is the docstring with reduced whitespace.
         self.long_description = None
         if self.spec.package.__doc__:
-            self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
+            self.long_description = re.sub(r'\s+', ' ',
+                                           self.spec.package.__doc__)
 
+    @property
+    def naming_scheme(self):
+        try:
+            naming_scheme = CONFIGURATION[self.name]['naming_scheme']
+        except KeyError:
+            naming_scheme = self.default_naming_format
+        return naming_scheme
+
+    @property
+    def tokens(self):
+        tokens = {
+            'name': self.spec.name,
+            'version': self.spec.version,
+            'compiler': self.spec.compiler
+        }
+        return tokens
+
+    @property
+    def use_name(self):
+        """
+        Returns the name the module command uses to refer to the package,
+        built from the naming scheme and the spec's DAG hash.
+        """
+        naming_tokens = self.tokens
+        naming_scheme = self.naming_scheme
+        name = naming_scheme.format(**naming_tokens)
+        # Always append the hash to make the module file unique
+        name += '-' + self.spec.dag_hash()
+        # Not everybody is working on linux...
+        parts = name.split('/')
+        name = join_path(*parts)
+        return name
 
     @property
     def category(self):
@@ -144,13 +299,51 @@ def category(self):
             return self.pkg.category
         # Extensions
         for extendee in self.pkg.extendees:
-            return '{extendee} extension'.format(extendee=extendee)
+            return '{extendee}_extension'.format(extendee=extendee)
         # Not very descriptive fallback
-        return 'spack installed package'
+        return 'spack'
 
+    @property
+    def blacklisted(self):
+        configuration = CONFIGURATION.get(self.name, {})
+        whitelist_matches = [x
+                             for x in configuration.get('whitelist', [])
+                             if self.spec.satisfies(x)]
+        blacklist_matches = [x
+                             for x in configuration.get('blacklist', [])
+                             if self.spec.satisfies(x)]
+        if whitelist_matches:
+            message = '\tWHITELIST : %s [matches : ' % self.spec.cshort_spec
+            for rule in whitelist_matches:
+                message += '%s ' % rule
+            message += ' ]'
+            tty.debug(message)
+
+        if blacklist_matches:
+            message = '\tBLACKLIST : %s [matches : ' % self.spec.cshort_spec
+            for rule in blacklist_matches:
+                message += '%s ' % rule
+            message += ' ]'
+            tty.debug(message)
+
+        if not whitelist_matches and blacklist_matches:
+            return True
+
+        return False
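+
+        # Illustrative modules.yaml snippet driving this check (the spec
+        # strings are only examples):
+        #
+        #     tcl:
+        #       whitelist: ['gcc']
+        #       blacklist: ['%gcc@4.4.7']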
 
     def write(self):
-        """Write out a module file for this object."""
+        """
+        Writes out a module file for this object.
+
+        This method employs a template pattern and expects derived classes to:
+        - override the header property
+        - provide formats for autoload, prerequisites and environment changes
+        """
+        if self.blacklisted:
+            return
+        tty.debug("\tWRITE : %s [%s]" %
+                  (self.spec.cshort_spec, self.file_name))
+
         module_dir = os.path.dirname(self.file_name)
         if not os.path.exists(module_dir):
             mkdirp(module_dir)
@@ -159,42 +352,73 @@ def write(self):
         # installation prefix
         env = inspect_path(self.spec.prefix)
 
-        # Let the extendee modify their extensions before asking for
-        # package-specific modifications
+        # Let the extendee/dependency modify their extensions/dependencies
+        # before asking for package-specific modifications
         spack_env = EnvironmentModifications()
-        for item in self.pkg.extendees:
-            try:
-                package = self.spec[item].package
-                package.setup_dependent_package(self.pkg.module, self.spec)
-                package.setup_dependent_environment(spack_env, env, self.spec)
-            except:
-                # The extends was conditional, so it doesn't count here
-                # eg: extends('python', when='+python')
-                pass
+        # TODO : the code down below is quite similar to
+        # TODO : build_environment.setup_package and needs to be factored out
+        # TODO : to a single place
+        for item in dependencies(self.spec, 'all'):
+            package = self.spec[item.name].package
+            modules = parent_class_modules(package.__class__)
+            for mod in modules:
+                set_module_variables_for_package(package, mod)
+            set_module_variables_for_package(package, package.module)
+            package.setup_dependent_package(self.pkg.module, self.spec)
+            package.setup_dependent_environment(spack_env, env, self.spec)
 
         # Package-specific environment modifications
+        set_module_variables_for_package(self.pkg, self.pkg.module)
         self.spec.package.setup_environment(spack_env, env)
 
-        # TODO : implement site-specific modifications and filters
-        if not env:
-            return
-
+        # Parse configuration file
+        module_configuration, conf_env = parse_config_options(self)
+        env.extend(conf_env)
+        filters = module_configuration.get('filter', {}).get(
+            'environment_blacklist', {})
+        # Build up the module file content
+        module_file_content = self.header
+        for x in filter_blacklisted(
+                module_configuration.pop('autoload', []), self.name):
+            module_file_content += self.autoload(x)
+        for x in filter_blacklisted(
+                module_configuration.pop('prerequisites', []), self.name):
+            module_file_content += self.prerequisite(x)
+        for line in self.process_environment_command(
+                filter_environment_blacklist(env, filters)):
+            module_file_content += line
+        for line in self.module_specific_content(module_configuration):
+            module_file_content += line
+
+        # Dump to file
         with open(self.file_name, 'w') as f:
-            self.write_header(f)
-            for line in self.process_environment_command(env):
-                f.write(line)
+            f.write(module_file_content)
 
-    def write_header(self, stream):
+    @property
+    def header(self):
         raise NotImplementedError()
 
+    def module_specific_content(self, configuration):
+        return tuple()
+
+    def autoload(self, spec):
+        m = type(self)(spec)
+        return self.autoload_format.format(module_file=m.use_name)
+
+    def prerequisite(self, spec):
+        m = type(self)(spec)
+        return self.prerequisite_format.format(module_file=m.use_name)
+
     def process_environment_command(self, env):
         for command in env:
             try:
-                yield self.formats[type(command)].format(**command.args)
+                fmt = self.environment_modifications_formats[type(command)]
+                yield fmt.format(**command.args)
             except KeyError:
-                tty.warn('Cannot handle command of type {command} : skipping request'.format(command=type(command)))
-                tty.warn('{context} at {filename}:{lineno}'.format(**command.args))
-
+                message = 'Cannot handle command of type {command} : skipping request'  # NOQA: ignore=E501
+                details = '{context} at {filename}:{lineno}'
+                tty.warn(message.format(command=type(command)))
+                tty.warn(details.format(**command.args))
 
     @property
     def file_name(self):
@@ -202,62 +426,65 @@ def file_name(self):
            where this module lives."""
         raise NotImplementedError()
 
-    @property
-    def use_name(self):
-        """Subclasses should implement this to return the name the
-           module command uses to refer to the package."""
-        raise NotImplementedError()
-
     def remove(self):
         mod_file = self.file_name
         if os.path.exists(mod_file):
             try:
                 os.remove(mod_file)  # Remove the module file
-                os.removedirs(os.path.dirname(mod_file))  # Remove all the empty directories from the leaf up
+                # Remove all the empty directories from the leaf up
+                os.removedirs(os.path.dirname(mod_file))
             except OSError:
-                pass  # removedirs throws OSError on first non-empty directory found
+                # removedirs throws OSError on first non-empty directory found
+                pass
 
 
 class Dotkit(EnvModule):
     name = 'dotkit'
     path = join_path(spack.share_path, "dotkit")
 
-    formats = {
+    environment_modifications_formats = {
         PrependPath: 'dk_alter {name} {value}\n',
         SetEnv: 'dk_setenv {name} {value}\n'
     }
 
+    autoload_format = 'dk_op {module_file}\n'
+
+    default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}'  # NOQA: ignore=E501
+
     @property
     def file_name(self):
-        return join_path(Dotkit.path, self.spec.architecture, '%s.dk' % self.use_name)
+        return join_path(Dotkit.path, self.spec.architecture,
+                         '%s.dk' % self.use_name)
 
     @property
-    def use_name(self):
-      return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
-                                 self.spec.compiler.name,
-                                 self.spec.compiler.version,
-                                 self.spec.dag_hash())
-
-    def write_header(self, dk_file):
+    def header(self):
         # Category
+        header = ''
         if self.category:
-            dk_file.write('#c %s\n' % self.category)
+            header += '#c %s\n' % self.category
 
         # Short description
         if self.short_description:
-            dk_file.write('#d %s\n' % self.short_description)
+            header += '#d %s\n' % self.short_description
 
         # Long description
         if self.long_description:
             for line in textwrap.wrap(self.long_description, 72):
-                dk_file.write("#h %s\n" % line)
+                header += '#h %s\n' % line
+        return header
+
+    def prerequisite(self, spec):
+        tty.warn('prerequisites: not supported by dotkit module files')
+        tty.warn('\tYou may want to check ~/.spack/modules.yaml')
+        return ''
 
 
 class TclModule(EnvModule):
     name = 'tcl'
     path = join_path(spack.share_path, "modules")
 
-    formats = {
+    environment_modifications_formats = {
         PrependPath: 'prepend-path {name} \"{value}\"\n',
         AppendPath: 'append-path {name} \"{value}\"\n',
         RemovePath: 'remove-path {name} \"{value}\"\n',
@@ -265,28 +492,61 @@ class TclModule(EnvModule):
         UnsetEnv: 'unsetenv {name}\n'
     }
 
+    autoload_format = ('if ![ is-loaded {module_file} ] {{\n'
+                       '    puts stderr "Autoloading {module_file}"\n'
+                       '    module load {module_file}\n'
+                       '}}\n\n')
+
+    prerequisite_format = 'prereq {module_file}\n'
+
+    default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}'  # NOQA: ignore=E501
+
     @property
     def file_name(self):
         return join_path(TclModule.path, self.spec.architecture, self.use_name)
 
     @property
-    def use_name(self):
-      return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
-                                 self.spec.compiler.name,
-                                 self.spec.compiler.version,
-                                 self.spec.dag_hash())
-
-    def write_header(self, module_file):
+    def header(self):
+        timestamp = datetime.datetime.now()
         # TCL Modulefile header
-        module_file.write('#%Module1.0\n')
+        header = '#%Module1.0\n'
+        header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp  # NOQA: ignore=E501
+        header += '##\n'
+        header += '## %s\n' % self.spec.short_spec
+        header += '##\n'
+
         # TODO : category ?
         # Short description
         if self.short_description:
-            module_file.write('module-whatis \"%s\"\n\n' % self.short_description)
+            header += 'module-whatis \"%s\"\n\n' % self.short_description
 
         # Long description
         if self.long_description:
-            module_file.write('proc ModulesHelp { } {\n')
+            header += 'proc ModulesHelp { } {\n'
             for line in textwrap.wrap(self.long_description, 72):
-                module_file.write("puts stderr \"%s\"\n" % line)
-            module_file.write('}\n\n')
+                header += 'puts stderr "%s"\n' % line
+            header += '}\n\n'
+        return header
+
+    def module_specific_content(self, configuration):
+        naming_tokens = self.tokens
+        # Conflict
+        conflict_format = configuration.get('conflict', [])
+        f = string.Formatter()
+        for item in conflict_format:
+            line = 'conflict ' + item + '\n'
+            if len([x for x in f.parse(line)]) > 1:
+                # The line contains placeholders to be substituted
+                for naming_dir, conflict_dir in zip(
+                        self.naming_scheme.split('/'), item.split('/')):
+                    if naming_dir != conflict_dir:
+                        message = 'conflict scheme does not match naming scheme [{spec}]\n\n'  # NOQA: ignore=E501
+                        message += 'naming scheme   : "{nformat}"\n'
+                        message += 'conflict scheme : "{cformat}"\n\n'
+                        message += '** You may want to check your `modules.yaml` configuration file **\n'  # NOQA: ignore=E501
+                        tty.error(message.format(spec=self.spec,
+                                                 nformat=self.naming_scheme,
+                                                 cformat=item))
+                        raise SystemExit('Module generation aborted.')
+                line = line.format(**naming_tokens)
+            yield line
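+
+
+# Illustrative modules.yaml sketch tying the pieces above together (the
+# top-level 'modules' key and the spec strings are assumptions; the other
+# keys are the ones read in this file):
+#
+#     modules:
+#       enable: ['tcl', 'dotkit']
+#       tcl:
+#         naming_scheme: '{name}/{version}-{compiler.name}-{compiler.version}'
+#         all:
+#           autoload: 'direct'
+#           conflict: ['{name}']
+#           filter:
+#             environment_blacklist: ['CPATH']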
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 4065553131859d19f4a4e8da17dd29b019370cc3..8e6cf3295418c938d7f45848dbbdcc27cc959cf9 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -37,7 +37,6 @@
 import re
 import textwrap
 import time
-import glob
 
 import llnl.util.tty as tty
 import spack
@@ -62,7 +61,6 @@
 from spack.util.executable import ProcessError
 from spack.version import *
 from urlparse import urlparse
-
 """Allowed URL schemes for spack packages."""
 _ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
 
@@ -305,26 +303,21 @@ class SomePackage(Package):
     #
     """By default we build in parallel.  Subclasses can override this."""
     parallel = True
-
     """# jobs to use for parallel make. If set, overrides default of ncpus."""
     make_jobs = None
-
-    """Most packages are NOT extendable.  Set to True if you want extensions."""
+    """Most packages are NOT extendable. Set to True if you want extensions."""
     extendable = False
-
     """List of prefix-relative file paths (or a single path). If these do
        not exist after install, or if they exist but are not files,
        sanity checks fail.
     """
     sanity_check_is_file = []
-
     """List of prefix-relative directory paths (or a single path). If
        these do not exist after install, or if they exist but are not
        directories, sanity checks will fail.
     """
     sanity_check_is_dir = []
 
-
     def __init__(self, spec):
         # this determines how the package should be built.
         self.spec = spec
@@ -336,23 +329,24 @@ def __init__(self, spec):
             self.name = self.name[self.name.rindex('.') + 1:]
 
         # Allow custom staging paths for packages
-        self.path=None
+        self.path = None
 
         # Sanity check attributes required by Spack directives.
         spack.directives.ensure_dicts(type(self))
 
         # Check versions in the versions dict.
         for v in self.versions:
-            assert(isinstance(v, Version))
+            assert (isinstance(v, Version))
 
         # Check version descriptors
         for v in sorted(self.versions):
-            assert(isinstance(self.versions[v], dict))
+            assert (isinstance(self.versions[v], dict))
 
         # Version-ize the keys in versions dict
         try:
-            self.versions = dict((Version(v), h) for v,h in self.versions.items())
-        except ValueError, e:
+            self.versions = dict((Version(v), h)
+                                 for v, h in self.versions.items())
+        except ValueError as e:
             raise ValueError("In package %s: %s" % (self.name, e.message))
 
         # stage used to build this package.
@@ -366,9 +360,9 @@ def __init__(self, spec):
         # This makes self.url behave sanely.
         if self.spec.versions.concrete:
             # TODO: this is a really roundabout way of determining the type
-            # TODO: of fetch to do. figure out a more sane fetch strategy/package
-            # TODO: init order (right now it's conflated with stage, package, and
-            # TODO: the tests make assumptions)
+            # TODO: of fetch to do. figure out a more sane fetch
+            # TODO: strategy/package init order (right now it's conflated with
+            # TODO: stage, package, and the tests make assumptions)
             f = fs.for_package_version(self, self.version)
             if isinstance(f, fs.URLFetchStrategy):
                 self.url = self.url_for_version(self.spec.version)
@@ -387,14 +381,12 @@ def __init__(self, spec):
         if self.is_extension:
             spack.repo.get(self.extendee_spec)._check_extendable()
 
-
     @property
     def version(self):
         if not self.spec.versions.concrete:
             raise ValueError("Can only get of package with concrete version.")
         return self.spec.versions[0]
 
-
     @memoized
     def version_urls(self):
         """Return a list of URLs for different versions of this
@@ -407,7 +399,6 @@ def version_urls(self):
                 version_urls[v] = args['url']
         return version_urls
 
-
     def nearest_url(self, version):
         """Finds the URL for the next lowest version with a URL.
            If there is no lower version with a URL, uses the
@@ -424,10 +415,11 @@ def nearest_url(self, version):
                 url = version_urls[v]
         return url
 
-
     # TODO: move this out of here and into some URL extrapolation module?
     def url_for_version(self, version):
-        """Returns a URL that you can download a new version of this package from."""
+        """
+        Returns a URL that you can download a new version of this package from.
+        """
         if not isinstance(version, Version):
             version = Version(version)
 
@@ -441,14 +433,17 @@ def url_for_version(self, version):
             return version_urls[version]
 
         # If we have no idea, try to substitute the version.
-        return spack.url.substitute_version(self.nearest_url(version),
-                                            self.url_version(version))
+        return spack.url.substitute_version(
+            self.nearest_url(version), self.url_version(version))
 
     def _make_resource_stage(self, root_stage, fetcher, resource):
         resource_stage_folder = self._resource_stage(resource)
         resource_mirror = join_path(self.name, os.path.basename(fetcher.url))
-        stage = ResourceStage(resource.fetcher, root=root_stage, resource=resource,
-                              name=resource_stage_folder, mirror_path=resource_mirror,
+        stage = ResourceStage(resource.fetcher,
+                              root=root_stage,
+                              resource=resource,
+                              name=resource_stage_folder,
+                              mirror_path=resource_mirror,
                               path=self.path)
         return stage
 
@@ -474,7 +469,8 @@ def _make_stage(self):
             else:
                 # Construct resource stage
                 resource = resources[ii - 1]  # ii == 0 is root!
-                stage = self._make_resource_stage(composite_stage[0], fetcher, resource)
+                stage = self._make_resource_stage(composite_stage[0], fetcher,
+                                                  resource)
             # Append the item to the composite
             composite_stage.append(stage)
 
@@ -492,13 +488,11 @@ def stage(self):
             self._stage = self._make_stage()
         return self._stage
 
-
     @stage.setter
     def stage(self, stage):
         """Allow a stage object to be set to override the default."""
         self._stage = stage
 
-
     def _make_fetcher(self):
         # Construct a composite fetcher that always contains at least
         # one element (the root package). In case there are resources
@@ -515,7 +509,8 @@ def _make_fetcher(self):
     @property
     def fetcher(self):
         if not self.spec.versions.concrete:
-            raise ValueError("Can only get a fetcher for a package with concrete versions.")
+            raise ValueError(
+                "Can only get a fetcher for a package with concrete versions.")
         if not self._fetcher:
             self._fetcher = self._make_fetcher()
         return self._fetcher
@@ -524,10 +519,11 @@ def fetcher(self):
     def fetcher(self, f):
         self._fetcher = f
 
-
     @property
     def extendee_spec(self):
-        """Spec of the extendee of this package, or None if it is not an extension."""
+        """
+        Spec of the extendee of this package, or None if it is not an extension
+        """
         if not self.extendees:
             return None
 
@@ -549,10 +545,11 @@ def extendee_spec(self):
             spec, kwargs = self.extendees[name]
             return spec
 
-
     @property
     def extendee_args(self):
-        """Spec of the extendee of this package, or None if it is not an extension."""
+        """
+        Keyword arguments given to the extends() directive of this package,
+        or None if it is not an extension
+        """
         if not self.extendees:
             return None
 
@@ -560,7 +557,6 @@ def extendee_args(self):
         name = next(iter(self.extendees))
         return self.extendees[name][1]
 
-
     @property
     def is_extension(self):
         # if it is concrete, it's only an extension if it actually
@@ -571,22 +567,20 @@ def is_extension(self):
             # If not, then it's an extension if it *could* be an extension
             return bool(self.extendees)
 
-
     def extends(self, spec):
-        if not spec.name in self.extendees:
+        if spec.name not in self.extendees:
             return False
         s = self.extendee_spec
         return s and s.satisfies(spec)
 
-
     @property
     def activated(self):
         if not self.is_extension:
-            raise ValueError("is_extension called on package that is not an extension.")
+            raise ValueError(
+                "is_extension called on package that is not an extension.")
         exts = spack.install_layout.extension_map(self.extendee_spec)
         return (self.name in exts) and (exts[self.name] == self.spec)
 
-
     def preorder_traversal(self, visited=None, **kwargs):
         """This does a preorder traversal of the package's dependence DAG."""
         virtual = kwargs.get("virtual", False)
@@ -605,36 +599,35 @@ def preorder_traversal(self, visited=None, **kwargs):
             spec = self.dependencies[name]
 
             # currently, we do not descend into virtual dependencies, as this
-            # makes doing a sensible traversal much harder.  We just assume that
-            # ANY of the virtual deps will work, which might not be true (due to
-            # conflicts or unsatisfiable specs).  For now this is ok but we might
-            # want to reinvestigate if we start using a lot of complicated virtual
-            # dependencies
+            # makes doing a sensible traversal much harder.  We just assume
+            # that ANY of the virtual deps will work, which might not be true
+            # (due to conflicts or unsatisfiable specs).  For now this is ok
+            # but we might want to reinvestigate if we start using a lot of
+            # complicated virtual dependencies
             # TODO: reinvestigate this.
             if spec.virtual:
                 if virtual:
                     yield spec
                 continue
 
-            for pkg in spack.repo.get(name).preorder_traversal(visited, **kwargs):
+            for pkg in spack.repo.get(name).preorder_traversal(visited,
+                                                               **kwargs):
                 yield pkg
 
-
     def provides(self, vpkg_name):
-        """True if this package provides a virtual package with the specified name."""
+        """
+        True if this package provides a virtual package with the specified name
+        """
         return any(s.name == vpkg_name for s in self.provided)
 
-
     def virtual_dependencies(self, visited=None):
         for spec in sorted(set(self.preorder_traversal(virtual=True))):
             yield spec
 
-
     @property
     def installed(self):
         return os.path.isdir(self.prefix)
 
-
     @property
     def installed_dependents(self):
         """Return a list of the specs of all installed packages that depend
@@ -651,60 +644,62 @@ def installed_dependents(self):
                     dependents.append(spec)
         return dependents
 
-
     @property
     def prefix(self):
         """Get the prefix into which this package should be installed."""
         return self.spec.prefix
 
-
     @property
     def compiler(self):
-        """Get the spack.compiler.Compiler object used to build this package."""
+        """Get the spack.compiler.Compiler object used to build this package"""
         if not self.spec.concrete:
             raise ValueError("Can only get a compiler for a concrete package.")
         return spack.compilers.compiler_for_spec(self.spec.compiler)
 
-
     def url_version(self, version):
-        """Given a version, this returns a string that should be substituted into the
-           package's URL to download that version.
-           By default, this just returns the version string. Subclasses may need to
-           override this, e.g. for boost versions where you need to ensure that there
-           are _'s in the download URL.
         """
-        return str(version)
+        Given a version, this returns a string that should be substituted
+        into the package's URL to download that version.
 
+        By default, this just returns the version string. Subclasses may need
+        to override this, e.g. for boost versions where you need to ensure that
+        there are _'s in the download URL.
+        """
+        return str(version)
 
     def remove_prefix(self):
-        """Removes the prefix for a package along with any empty parent directories."""
+        """
+        Removes the prefix for a package along with any empty parent
+        directories
+        """
         spack.install_layout.remove_install_directory(self.spec)
 
-
     def do_fetch(self, mirror_only=False):
-        """Creates a stage directory and downloads the tarball for this package.
-           Working directory will be set to the stage directory.
+        """
+        Creates a stage directory and downloads the tarball for this package.
+        Working directory will be set to the stage directory.
         """
         if not self.spec.concrete:
             raise ValueError("Can only fetch concrete packages.")
 
         start_time = time.time()
-        if spack.do_checksum and not self.version in self.versions:
-            tty.warn("There is no checksum on file to fetch %s safely."
-                     % self.spec.format('$_$@'))
+        if spack.do_checksum and self.version not in self.versions:
+            tty.warn("There is no checksum on file to fetch %s safely." %
+                     self.spec.format('$_$@'))
 
             # Ask the user whether to skip the checksum if we're
             # interactive, but just fail if non-interactive.
-            checksum_msg = "Add a checksum or use --no-checksum to skip this check."
+            checksum_msg = "Add a checksum or use --no-checksum to skip this check."  # NOQA: ignore=E501
             ignore_checksum = False
             if sys.stdout.isatty():
-                ignore_checksum = tty.get_yes_or_no("  Fetch anyway?", default=False)
+                ignore_checksum = tty.get_yes_or_no("  Fetch anyway?",
+                                                    default=False)
                 if ignore_checksum:
                     tty.msg("Fetching with no checksum.", checksum_msg)
 
             if not ignore_checksum:
-                raise FetchError(
-                    "Will not fetch %s" % self.spec.format('$_$@'), checksum_msg)
+                raise FetchError("Will not fetch %s" %
+                                 self.spec.format('$_$@'), checksum_msg)
 
         self.stage.fetch(mirror_only)
 
@@ -723,7 +718,6 @@ def do_stage(self, mirror_only=False):
         self.stage.expand_archive()
         self.stage.chdir_to_source()
 
-
     def do_patch(self):
         """Calls do_stage(), then applied patches to the expanded tarball if they
            haven't been applied already."""
@@ -743,10 +737,10 @@ def do_patch(self):
 
         # Construct paths to special files in the archive dir used to
         # keep track of whether patches were successfully applied.
-        archive_dir     = self.stage.source_path
-        good_file       = join_path(archive_dir, '.spack_patched')
+        archive_dir = self.stage.source_path
+        good_file = join_path(archive_dir, '.spack_patched')
         no_patches_file = join_path(archive_dir, '.spack_no_patches')
-        bad_file        = join_path(archive_dir, '.spack_patch_failed')
+        bad_file = join_path(archive_dir, '.spack_patch_failed')
 
         # If we encounter an archive that failed to patch, restage it
         # so that we can apply all the patches again.
@@ -801,13 +795,11 @@ def do_patch(self):
         else:
             touch(no_patches_file)
 
-
     @property
     def namespace(self):
         namespace, dot, module = self.__module__.rpartition('.')
         return namespace
 
-
     def do_fake_install(self):
         """Make a fake install directory contaiing a 'fake' file in bin."""
         mkdirp(self.prefix.bin)
@@ -815,15 +807,15 @@ def do_fake_install(self):
         mkdirp(self.prefix.lib)
         mkdirp(self.prefix.man1)
 
-
     def _get_needed_resources(self):
         resources = []
         # Select the resources that are needed for this build
         for when_spec, resource_list in self.resources.items():
             if when_spec in self.spec:
                 resources.extend(resource_list)
-        # Sorts the resources by the length of the string representing their destination. Since any nested resource
-        # must contain another resource's name in its path, it seems that should work
+        # Sorts the resources by the length of the string representing their
+        # destination. Since any nested resource must contain another
+        # resource's name in its path, it seems that should work
         resources = sorted(resources, key=lambda res: len(res.destination))
         return resources
 
@@ -832,10 +824,14 @@ def _resource_stage(self, resource):
         resource_stage_folder = '-'.join(pieces)
         return resource_stage_folder
 
-
     def do_install(self,
-                   keep_prefix=False,  keep_stage=False, ignore_deps=False,
-                   skip_patch=False, verbose=False, make_jobs=None, fake=False):
+                   keep_prefix=False,
+                   keep_stage=False,
+                   ignore_deps=False,
+                   skip_patch=False,
+                   verbose=False,
+                   make_jobs=None,
+                   fake=False):
         """Called by commands to install a package and its dependencies.
 
         Package implementations should override install() to describe
@@ -846,18 +842,20 @@ def do_install(self,
         keep_stage  -- By default, stage is destroyed only if there are no
                        exceptions during build. Set to True to keep the stage
                        even with exceptions.
-        ignore_deps -- Do not install dependencies before installing this package.
+        ignore_deps -- Don't install dependencies before installing this
+                       package
         fake        -- Don't really build -- install fake stub files instead.
         skip_patch  -- Skip patch stage of build if True.
         verbose     -- Display verbose build output (by default, suppresses it)
-        make_jobs   -- Number of make jobs to use for install.  Default is ncpus.
+        make_jobs   -- Number of make jobs to use for install. Default is ncpus
         """
         if not self.spec.concrete:
             raise ValueError("Can only install concrete packages.")
 
         # No installation needed if package is external
         if self.spec.external:
-            tty.msg("%s is externally installed in %s" % (self.name, self.spec.external))
+            tty.msg("%s is externally installed in %s" %
+                    (self.name, self.spec.external))
             return
 
         # Ensure package is not already installed
@@ -869,9 +867,13 @@ def do_install(self,
 
         # First, install dependencies recursively.
         if not ignore_deps:
-            self.do_install_dependencies(
-                keep_prefix=keep_prefix, keep_stage=keep_stage, ignore_deps=ignore_deps,
-                fake=fake, skip_patch=skip_patch, verbose=verbose, make_jobs=make_jobs)
+            self.do_install_dependencies(keep_prefix=keep_prefix,
+                                         keep_stage=keep_stage,
+                                         ignore_deps=ignore_deps,
+                                         fake=fake,
+                                         skip_patch=skip_patch,
+                                         verbose=verbose,
+                                         make_jobs=make_jobs)
 
         # Set parallelism before starting build.
         self.make_jobs = make_jobs
@@ -899,35 +901,41 @@ def build_process():
                     self.do_fake_install()
                 else:
                     # Do the real install in the source directory.
-                     self.stage.chdir_to_source()
+                    self.stage.chdir_to_source()
 
-                     # Save the build environment in a file before building.
-                     env_path = join_path(os.getcwd(), 'spack-build.env')
+                    # Save the build environment in a file before building.
+                    env_path = join_path(os.getcwd(), 'spack-build.env')
 
-                     try:
-                        # Redirect I/O to a build log (and optionally to the terminal)
+                    try:
+                        # Redirect I/O to a build log (and optionally to
+                        # the terminal)
                         log_path = join_path(os.getcwd(), 'spack-build.out')
                         log_file = open(log_path, 'w')
-                        with log_output(log_file, verbose, sys.stdout.isatty(), True):
+                        with log_output(log_file, verbose, sys.stdout.isatty(),
+                                        True):
                             dump_environment(env_path)
                             self.install(self.spec, self.prefix)
 
-                     except ProcessError as e:
-                         # Annotate ProcessErrors with the location of the build log.
-                         e.build_log = log_path
-                         raise e
+                    except ProcessError as e:
+                        # Annotate ProcessErrors with the location of
+                        # the build log
+                        e.build_log = log_path
+                        raise e
 
-                     # Ensure that something was actually installed.
-                     self.sanity_check_prefix()
+                    # Ensure that something was actually installed.
+                    self.sanity_check_prefix()
 
-                     # Copy provenance into the install directory on success
-                     log_install_path = spack.install_layout.build_log_path(self.spec)
-                     env_install_path = spack.install_layout.build_env_path(self.spec)
-                     packages_dir = spack.install_layout.build_packages_path(self.spec)
+                    # Copy provenance into the install directory on success
+                    log_install_path = spack.install_layout.build_log_path(
+                        self.spec)
+                    env_install_path = spack.install_layout.build_env_path(
+                        self.spec)
+                    packages_dir = spack.install_layout.build_packages_path(
+                        self.spec)
 
-                     install(log_path, log_install_path)
-                     install(env_path, env_install_path)
-                     dump_packages(self.spec, packages_dir)
+                    install(log_path, log_install_path)
+                    install(env_path, env_install_path)
+                    dump_packages(self.spec, packages_dir)
 
                 # Run post install hooks before build stage is removed.
                 spack.hooks.post_install(self)
@@ -937,8 +945,9 @@ def build_process():
             build_time = self._total_time - self._fetch_time
 
             tty.msg("Successfully installed %s" % self.name,
-                    "Fetch: %s.  Build: %s.  Total: %s."
-                    % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
+                    "Fetch: %s.  Build: %s.  Total: %s." %
+                    (_hms(self._fetch_time), _hms(build_time),
+                     _hms(self._total_time)))
             print_pkg(self.prefix)
 
         try:
@@ -953,16 +962,17 @@ def build_process():
                 tty.warn("Keeping install prefix in place despite error.",
                          "Spack will think this package is installed. " +
                          "Manually remove this directory to fix:",
-                         self.prefix, wrap=True)
+                         self.prefix,
+                         wrap=True)
             raise
 
         # note: PARENT of the build process adds the new package to
         # the database, so that we don't need to re-read from file.
         spack.installed_db.add(self.spec, self.prefix)
 
-
     def sanity_check_prefix(self):
         """This function checks whether install succeeded."""
+
         def check_paths(path_list, filetype, predicate):
             if isinstance(path_list, basestring):
                 path_list = [path_list]
@@ -970,8 +980,9 @@ def check_paths(path_list, filetype, predicate):
             for path in path_list:
                 abs_path = os.path.join(self.prefix, path)
                 if not predicate(abs_path):
-                    raise InstallError("Install failed for %s. No such %s in prefix: %s"
-                                       % (self.name, filetype, path))
+                    raise InstallError(
+                        "Install failed for %s. No such %s in prefix: %s" %
+                        (self.name, filetype, path))
 
         check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
         check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)
@@ -982,13 +993,11 @@ def check_paths(path_list, filetype, predicate):
             raise InstallError(
                 "Install failed for %s.  Nothing was installed!" % self.name)
 
-
     def do_install_dependencies(self, **kwargs):
         # Pass along paths of dependencies here
         for dep in self.spec.dependencies.values():
             dep.package.do_install(**kwargs)
 
-
     @property
     def build_log_path(self):
         if self.installed:
@@ -996,7 +1005,6 @@ def build_log_path(self):
         else:
             return join_path(self.stage.source_path, 'spack-build.out')
 
-
     @property
     def module(self):
         """Use this to add variables to the class's module's scope.
@@ -1006,7 +1014,7 @@ def module(self):
                           fromlist=[self.__class__.__name__])
 
     def setup_environment(self, spack_env, run_env):
-        """Set up the compile and runtime environemnts for a package.
+        """Set up the compile and runtime environments for a package.
 
         `spack_env` and `run_env` are `EnvironmentModifications`
         objects.  Package authors can call methods on them to alter
@@ -1037,7 +1045,6 @@ def setup_environment(self, spack_env, run_env):
         """
         pass
 
-
     def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
         """Set up the environment of packages that depend on this one.
 
@@ -1077,7 +1084,6 @@ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
         """
         self.setup_environment(spack_env, run_env)
 
-
     def setup_dependent_package(self, module, dependent_spec):
         """Set up Python module-scope variables for dependent packages.
 
@@ -1123,8 +1129,11 @@ def setup_dependent_package(self, module, dependent_spec):
         pass
 
     def install(self, spec, prefix):
-        """Package implementations override this with their own build configuration."""
-        raise InstallError("Package %s provides no install method!" % self.name)
+        """
+        Package implementations override this with their own build
+        configuration.
+        """
+        raise InstallError("Package %s provides no install method!" %
+                           self.name)
 
     def do_uninstall(self, force=False):
         if not self.installed:
@@ -1146,12 +1155,10 @@ def do_uninstall(self, force=False):
         # Once everything else is done, run post install hooks
         spack.hooks.post_uninstall(self)
 
-
     def _check_extendable(self):
         if not self.extendable:
             raise ValueError("Package %s is not extendable!" % self.name)
 
-
     def _sanity_check_extension(self):
         if not self.is_extension:
             raise ActivationError("This package is not an extension.")
@@ -1160,12 +1167,13 @@ def _sanity_check_extension(self):
         extendee_package._check_extendable()
 
         if not extendee_package.installed:
-            raise ActivationError("Can only (de)activate extensions for installed packages.")
+            raise ActivationError(
+                "Can only (de)activate extensions for installed packages.")
         if not self.installed:
             raise ActivationError("Extensions must first be installed.")
-        if not self.extendee_spec.name in self.extendees:
-            raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
-
+        if self.extendee_spec.name not in self.extendees:
+            raise ActivationError("%s does not extend %s!" %
+                                  (self.name, self.extendee.name))
 
     def do_activate(self, force=False):
         """Called on an etension to invoke the extendee's activate method.
@@ -1175,8 +1183,8 @@ def do_activate(self, force=False):
         """
         self._sanity_check_extension()
 
-        spack.install_layout.check_extension_conflict(
-            self.extendee_spec, self.spec)
+        spack.install_layout.check_extension_conflict(self.extendee_spec,
+                                                      self.spec)
 
         # Activate any package dependencies that are also extensions.
         if not force:
@@ -1188,9 +1196,8 @@ def do_activate(self, force=False):
         self.extendee_spec.package.activate(self, **self.extendee_args)
 
         spack.install_layout.add_extension(self.extendee_spec, self.spec)
-        tty.msg("Activated extension %s for %s"
-                % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
-
+        tty.msg("Activated extension %s for %s" %
+                (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
 
     def activate(self, extension, **kwargs):
         """Symlinks all files from the extension into extendee's install dir.
@@ -1201,6 +1208,7 @@ def activate(self, extension, **kwargs):
         always executed.
 
         """
+
         def ignore(filename):
             return (filename in spack.install_layout.hidden_file_paths or
                     kwargs.get('ignore', lambda f: False)(filename))
@@ -1212,7 +1220,6 @@ def ignore(filename):
 
         tree.merge(self.prefix, ignore=ignore)
 
-
     def do_deactivate(self, **kwargs):
         """Called on the extension to invoke extendee's deactivate() method."""
         self._sanity_check_extension()
@@ -1230,7 +1237,7 @@ def do_deactivate(self, **kwargs):
                 for dep in aspec.traverse():
                     if self.spec == dep:
                         raise ActivationError(
-                            "Cannot deactivate %s beacuse %s is activated and depends on it."
+                            "Cannot deactivate %s because %s is activated and depends on it."  # NOQA: ignore=E501
                             % (self.spec.short_spec, aspec.short_spec))
 
         self.extendee_spec.package.deactivate(self, **self.extendee_args)
@@ -1238,11 +1245,11 @@ def do_deactivate(self, **kwargs):
         # redundant activation check -- makes SURE the spec is not
         # still activated even if something was wrong above.
         if self.activated:
-            spack.install_layout.remove_extension(self.extendee_spec, self.spec)
-
-        tty.msg("Deactivated extension %s for %s"
-                % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
+            spack.install_layout.remove_extension(self.extendee_spec,
+                                                  self.spec)
 
+        tty.msg("Deactivated extension %s for %s" %
+                (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
 
     def deactivate(self, extension, **kwargs):
         """Unlinks all files from extension out of this package's install dir.
@@ -1253,6 +1260,7 @@ def deactivate(self, extension, **kwargs):
         always executed.
 
         """
+
         def ignore(filename):
             return (filename in spack.install_layout.hidden_file_paths or
                     kwargs.get('ignore', lambda f: False)(filename))
@@ -1260,17 +1268,14 @@ def ignore(filename):
         tree = LinkTree(extension.prefix)
         tree.unmerge(self.prefix, ignore=ignore)
 
-
     def do_restage(self):
         """Reverts expanded/checked out source to a pristine state."""
         self.stage.restage()
 
-
     def do_clean(self):
         """Removes the package's build stage and source tarball."""
         self.stage.destroy()
 
-
     def format_doc(self, **kwargs):
         """Wrap doc string at 72 characters and format nicely"""
         indent = kwargs.get('indent', 0)
@@ -1285,7 +1290,6 @@ def format_doc(self, **kwargs):
             results.write((" " * indent) + line + "\n")
         return results.getvalue()
 
-
     @property
     def all_urls(self):
         urls = []
@@ -1297,7 +1301,6 @@ def all_urls(self):
                 urls.append(args['url'])
         return urls
 
-
     def fetch_remote_versions(self):
         """Try to find remote versions of this package using the
            list_url and any other URLs described in the package file."""
@@ -1306,26 +1309,30 @@ def fetch_remote_versions(self):
 
         try:
             return spack.util.web.find_versions_of_archive(
-                *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
+                *self.all_urls,
+                list_url=self.list_url,
+                list_depth=self.list_depth)
         except spack.error.NoNetworkConnectionError as e:
-            tty.die("Package.fetch_versions couldn't connect to:",
-                    e.url, e.message)
-
+            tty.die("Package.fetch_versions couldn't connect to:", e.url,
+                    e.message)
 
     @property
     def rpath(self):
         """Get the rpath this package links with, as a list of paths."""
         rpaths = [self.prefix.lib, self.prefix.lib64]
-        rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False)
+        rpaths.extend(d.prefix.lib
+                      for d in self.spec.traverse(root=False)
                       if os.path.isdir(d.prefix.lib))
-        rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False)
+        rpaths.extend(d.prefix.lib64
+                      for d in self.spec.traverse(root=False)
                       if os.path.isdir(d.prefix.lib64))
         return rpaths
 
-
     @property
     def rpath_args(self):
-        """Get the rpath args as a string, with -Wl,-rpath, for each element."""
+        """
+        Get the rpath args as a string, with -Wl,-rpath, for each element
+        """
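+        # e.g. "-Wl,-rpath,<prefix>/lib -Wl,-rpath,<dep-prefix>/lib64"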
         return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
 
 
@@ -1333,6 +1340,7 @@ def install_dependency_symlinks(pkg, spec, prefix):
     """Execute a dummy install and flatten dependencies"""
     flatten_dependencies(spec, prefix)
 
+
 def flatten_dependencies(spec, flat_dir):
     """Make each dependency of spec present in dir via symlink."""
     for dep in spec.traverse(root=False):
@@ -1341,13 +1349,13 @@ def flatten_dependencies(spec, flat_dir):
         dep_path = spack.install_layout.path_for_spec(dep)
         dep_files = LinkTree(dep_path)
 
-        os.mkdir(flat_dir+'/'+name)
+        os.mkdir(flat_dir + '/' + name)
 
-        conflict = dep_files.find_conflict(flat_dir+'/'+name)
+        conflict = dep_files.find_conflict(flat_dir + '/' + name)
         if conflict:
             raise DependencyConflictError(conflict)
 
-        dep_files.merge(flat_dir+'/'+name)
+        dep_files.merge(flat_dir + '/' + name)
 
 
 def validate_package_url(url_string):
@@ -1388,9 +1396,11 @@ def dump_packages(spec, path):
             # Create a source repo and get the pkg directory out of it.
             try:
                 source_repo = spack.repository.Repo(source_repo_root)
-                source_pkg_dir = source_repo.dirname_for_package_name(node.name)
-            except RepoError as e:
-                tty.warn("Warning: Couldn't copy in provenance for %s" % node.name)
+                source_pkg_dir = source_repo.dirname_for_package_name(
+                    node.name)
+            except RepoError:
+                tty.warn("Warning: Couldn't copy in provenance for %s" %
+                         node.name)
 
         # Create a destination repository
         dest_repo_root = join_path(path, node.namespace)
@@ -1410,7 +1420,7 @@ def print_pkg(message):
     """Outputs a message with a package icon."""
     from llnl.util.tty.color import cwrite
     cwrite('@*g{[+]} ')
-    print message
+    print(message)
 
 
 def _hms(seconds):
@@ -1419,20 +1429,25 @@ def _hms(seconds):
     h, m = divmod(m, 60)
 
     parts = []
-    if h: parts.append("%dh" % h)
-    if m: parts.append("%dm" % m)
-    if s: parts.append("%.2fs" % s)
+    if h:
+        parts.append("%dh" % h)
+    if m:
+        parts.append("%dm" % m)
+    if s:
+        parts.append("%.2fs" % s)
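+    # e.g. _hms(3725.5) == '1h 2m 5.50s'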
     return ' '.join(parts)
 
 
 class FetchError(spack.error.SpackError):
     """Raised when something goes wrong during fetch."""
+
     def __init__(self, message, long_msg=None):
         super(FetchError, self).__init__(message, long_msg)
 
 
 class InstallError(spack.error.SpackError):
     """Raised when something goes wrong during install or uninstall."""
+
     def __init__(self, message, long_msg=None):
         super(InstallError, self).__init__(message, long_msg)
 
@@ -1443,21 +1458,24 @@ class ExternalPackageError(InstallError):
 
 class PackageStillNeededError(InstallError):
     """Raised when package is still needed by another on uninstall."""
+
     def __init__(self, spec, dependents):
-        super(PackageStillNeededError, self).__init__(
-            "Cannot uninstall %s" % spec)
+        super(PackageStillNeededError, self).__init__("Cannot uninstall %s" %
+                                                      spec)
         self.spec = spec
         self.dependents = dependents
 
 
 class PackageError(spack.error.SpackError):
     """Raised when something is wrong with a package definition."""
+
     def __init__(self, message, long_msg=None):
         super(PackageError, self).__init__(message, long_msg)
 
 
 class PackageVersionError(PackageError):
     """Raised when a version URL cannot automatically be determined."""
+
     def __init__(self, version):
         super(PackageVersionError, self).__init__(
             "Cannot determine a URL automatically for version %s" % version,
@@ -1466,6 +1484,7 @@ def __init__(self, version):
 
 class VersionFetchError(PackageError):
     """Raised when a version URL cannot automatically be determined."""
+
     def __init__(self, cls):
         super(VersionFetchError, self).__init__(
             "Cannot fetch versions for package %s " % cls.__name__ +
@@ -1474,12 +1493,15 @@ def __init__(self, cls):
 
 class NoURLError(PackageError):
     """Raised when someone tries to build a URL for a package with no URLs."""
+
     def __init__(self, cls):
         super(NoURLError, self).__init__(
             "Package %s has no version with a URL." % cls.__name__)
 
 
-class ExtensionError(PackageError): pass
+class ExtensionError(PackageError):
+
+    pass
 
 
 class ExtensionConflictError(ExtensionError):
@@ -1495,7 +1517,8 @@ def __init__(self, msg, long_msg=None):
 
 class DependencyConflictError(spack.error.SpackError):
     """Raised when the dependencies cannot be flattened as asked for."""
+
     def __init__(self, conflict):
         super(DependencyConflictError, self).__init__(
-            "%s conflicts with another file in the flattened directory." %(
+            "%s conflicts with another file in the flattened directory." % (
                 conflict))
diff --git a/lib/spack/spack/package_test.py b/lib/spack/spack/package_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c15e3f5d01d669598e244101fb491be77bd1cc7
--- /dev/null
+++ b/lib/spack/spack/package_test.py
@@ -0,0 +1,66 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+def compile_c_and_execute(source_file, include_flags, link_flags):
+    """Compile C @p source_file with @p include_flags and @p link_flags,
+    run and return the output.
+    """
+    cc = which('cc')
+    # Copy so the caller's include_flags list is not modified in place.
+    flags = list(include_flags)
+    flags.append(source_file)
+    cc('-c', *flags)
+    name = os.path.splitext(os.path.basename(source_file))[0]
+    cc('-o', "check", "%s.o" % name,
+       *link_flags)
+
+    check = Executable('./check')
+    return check(return_output=True)
+
+
+def compare_output(current_output, blessed_output):
+    """Compare blessed and current output of executables."""
+    if current_output != blessed_output:
+        print("Produced output does not match expected output.")
+        print("Expected output:")
+        print('-' * 80)
+        print(blessed_output)
+        print('-' * 80)
+        print("Produced output:")
+        print('-' * 80)
+        print(current_output)
+        print('-' * 80)
+        raise RuntimeError("Output check failed.",
+                           "See spack_output.log for details")
+
+
+def compare_output_file(current_output, blessed_output_file):
+    """Same as above, but when the blessed output is given as a file."""
+    with open(blessed_output_file, 'r') as f:
+        blessed_output = f.read()
+
+    compare_output(current_output, blessed_output)
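+
+
+# A minimal usage sketch (the file names and flags below are hypothetical,
+# for illustration only): a package's install-time check could compile a
+# bundled test program against the new prefix and compare its output with a
+# blessed reference file:
+#
+#     include_flags = ['-I%s' % prefix.include]
+#     link_flags = ['-L%s' % prefix.lib, '-lexample']
+#     output = compile_c_and_execute('test.c', include_flags, link_flags)
+#     compare_output_file(output, 'test.output')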
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 3c5edde66b94ca7ff032b964cc3ef2a5b4af8f03..10eaac134411a01f04cafa375860b993c26a9d38 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -23,52 +23,23 @@
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
 import sys
-import unittest
-import nose
 
-from spack.test.tally_plugin import Tally
-from llnl.util.filesystem import join_path
 import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-
+import nose
 import spack
-
+from llnl.util.filesystem import join_path
+from llnl.util.tty.colify import colify
+from spack.test.tally_plugin import Tally
 """Names of tests to be included in Spack's test suite"""
-test_names = ['versions',
-              'url_parse',
-              'url_substitution',
-              'packages',
-              'stage',
-              'spec_syntax',
-              'spec_semantics',
-              'spec_dag',
-              'concretize',
-              'multimethod',
-              'install',
-              'package_sanity',
-              'config',
-              'directory_layout',
-              'pattern',
-              'python_version',
-              'git_fetch',
-              'svn_fetch',
-              'hg_fetch',
-              'mirror',
-              'url_extrapolate',
-              'cc',
-              'link_tree',
-              'spec_yaml',
-              'optional_deps',
-              'make_executable',
-              'configure_guess',
-              'lock',
-              'database',
-              'namespace_trie',
-              'yaml',
-              'sbang',
-              'environment',
-              'cmd.uninstall',
-              'cmd.test_install']
+test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
+              'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
+              'multimethod', 'install', 'package_sanity', 'config',
+              'directory_layout', 'pattern', 'python_version', 'git_fetch',
+              'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
+              'cc', 'link_tree', 'spec_yaml', 'optional_deps',
+              'make_executable', 'configure_guess', 'lock', 'database',
+              'namespace_trie', 'yaml', 'sbang', 'environment',
+              'cmd.uninstall', 'cmd.test_install']
 
 
 def list_tests():
@@ -79,8 +50,6 @@ def list_tests():
 def run(names, outputDir, verbose=False):
     """Run tests with the supplied names.  Names should be a list.  If
        it's empty, run ALL of Spack's tests."""
-    verbosity = 1 if not verbose else 2
-
     if not names:
         names = test_names
     else:
@@ -94,7 +63,7 @@ def run(names, outputDir, verbose=False):
     tally = Tally()
     for test in names:
         module = 'spack.test.' + test
-        print module
+        print(module)
 
         tty.msg("Running test: %s" % test)
 
@@ -104,15 +73,13 @@ def run(names, outputDir, verbose=False):
             xmlOutputFname = "unittests-{0}.xml".format(test)
             xmlOutputPath = join_path(outputDir, xmlOutputFname)
             runOpts += ["--with-xunit",
-                "--xunit-file={0}".format(xmlOutputPath)]
+                        "--xunit-file={0}".format(xmlOutputPath)]
         argv = [""] + runOpts + [module]
-        result = nose.run(argv=argv, addplugins=[tally])
+        nose.run(argv=argv, addplugins=[tally])
 
     succeeded = not tally.failCount and not tally.errorCount
-    tty.msg("Tests Complete.",
-            "%5d tests run" % tally.numberOfTestsRun,
-            "%5d failures" % tally.failCount,
-            "%5d errors" % tally.errorCount)
+    tty.msg("Tests Complete.", "%5d tests run" % tally.numberOfTestsRun,
+            "%5d failures" % tally.failCount, "%5d errors" % tally.errorCount)
 
     if succeeded:
         tty.info("OK", format='g')
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index 946d267c06860f8744a2fd5d73ae4ba54f1a7c76..0d16e8c656d27e82b490270b908299850170f2c5 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -72,6 +72,11 @@ def setUp(self):
         os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7"
         os.environ['SPACK_SHORT_SPEC'] = "foo@1.2"
 
+        os.environ['SPACK_CC_RPATH_ARG']  = "-Wl,-rpath,"
+        os.environ['SPACK_CXX_RPATH_ARG'] = "-Wl,-rpath,"
+        os.environ['SPACK_F77_RPATH_ARG'] = "-Wl,-rpath,"
+        os.environ['SPACK_FC_RPATH_ARG']  = "-Wl,-rpath,"
+
         # Make some fake dependencies
         self.tmp_deps = tempfile.mkdtemp()
         self.dep1 = join_path(self.tmp_deps, 'dep1')
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index 0562d2d620953ed85e64b689752afdd0463aedf5..ed0797a5415441c27d4d469f6529c19cbe3fe106 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -33,7 +33,7 @@
 
 # Some sample compiler config data
 a_comps =  {
-    "all": {
+    "x86_64_E5v2_IntelIB": {
         "gcc@4.7.3" : {
             "cc" : "/gcc473",
             "cxx": "/g++473",
@@ -53,7 +53,7 @@
 }
 
 b_comps = {
-    "all": {
+    "x86_64_E5v3": {
         "icc@10.0" : {
             "cc" : "/icc100",
             "cxx": "/icc100",
@@ -72,6 +72,10 @@
     }
 }
 
+# Some Sample repo data
+repos_low = [ "/some/path" ]
+repos_high = [ "/some/other/path" ]
+
 class ConfigTest(MockPackagesTest):
 
     def setUp(self):
@@ -85,17 +89,21 @@ def tearDown(self):
         super(ConfigTest, self).tearDown()
         shutil.rmtree(self.tmp_dir, True)
 
-
-    def check_config(self, comps, *compiler_names):
+    def check_config(self, comps, arch, *compiler_names):
         """Check that named compilers in comps match Spack's config."""
         config = spack.config.get_config('compilers')
         compiler_list = ['cc', 'cxx', 'f77', 'fc']
         for key in compiler_names:
             for c in compiler_list:
-                expected = comps['all'][key][c]
-                actual = config['all'][key][c]
+                expected = comps[arch][key][c]
+                actual = config[arch][key][c]
                 self.assertEqual(expected, actual)
 
+    def test_write_list_in_memory(self):
+        spack.config.update_config('repos', repos_low, 'test_low_priority')
+        spack.config.update_config('repos', repos_high, 'test_high_priority')
+        config = spack.config.get_config('repos')
+        self.assertEqual(config, repos_high+repos_low)
 
     def test_write_key_in_memory(self):
         # Write b_comps "on top of" a_comps.
@@ -103,9 +111,8 @@ def test_write_key_in_memory(self):
         spack.config.update_config('compilers', b_comps, 'test_high_priority')
 
         # Make sure the config looks how we expect.
-        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
-        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
-
+        self.check_config(a_comps, 'x86_64_E5v2_IntelIB',
+                          'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'x86_64_E5v3',
+                          'icc@10.0', 'icc@11.1', 'clang@3.3')
 
     def test_write_key_to_disk(self):
         # Write b_comps "on top of" a_comps.
@@ -116,5 +123,17 @@ def test_write_key_to_disk(self):
         spack.config.clear_config_caches()
 
         # Same check again, to ensure consistency.
-        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
-        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
+        self.check_config(a_comps, 'x86_64_E5v2_IntelIB',
+                          'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'x86_64_E5v3',
+                          'icc@10.0', 'icc@11.1', 'clang@3.3')
+
+    def test_write_to_same_priority_file(self):
+        # Write b_comps in the same file as a_comps.
+        spack.config.update_config('compilers', a_comps, 'test_low_priority')
+        spack.config.update_config('compilers', b_comps, 'test_low_priority')
+
+        # Clear caches so we're forced to read from disk.
+        spack.config.clear_config_caches()
+
+        # Same check again, to ensure consistency.
+        self.check_config(a_comps, 'x86_64_E5v2_IntelIB',
+                          'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'x86_64_E5v3',
+                          'icc@10.0', 'icc@11.1', 'clang@3.3')
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
new file mode 100644
index 0000000000000000000000000000000000000000..c65d663250f7b0e8211f7db8dadf67f8718b58a8
--- /dev/null
+++ b/lib/spack/spack/test/modules.py
@@ -0,0 +1,157 @@
+import collections
+from contextlib import contextmanager
+
+import StringIO
+import spack.modules
+from spack.test.mock_packages_test import MockPackagesTest
+
+FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+
+
+# Monkey-patch open to write module files to a StringIO instance
+@contextmanager
+def mock_open(filename, mode):
+    if not mode == 'w':
+        raise RuntimeError(
+            'test.modules : unexpected opening mode for monkey-patched open')
+
+    FILE_REGISTRY[filename] = StringIO.StringIO()
+
+    try:
+        yield FILE_REGISTRY[filename]
+    finally:
+        handle = FILE_REGISTRY[filename]
+        FILE_REGISTRY[filename] = handle.getvalue()
+        handle.close()
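+
+# Example: after ``with mock_open('foo', 'w') as f: f.write('bar')`` the
+# registry holds FILE_REGISTRY['foo'] == 'bar', so tests can inspect the
+# generated module file without touching the filesystem.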
+
+
+configuration_autoload_direct = {
+    'enable': ['tcl'],
+    'tcl': {
+        'all': {
+            'autoload': 'direct'
+        }
+    }
+}
+
+configuration_autoload_all = {
+    'enable': ['tcl'],
+    'tcl': {
+        'all': {
+            'autoload': 'all'
+        }
+    }
+}
+
+configuration_alter_environment = {
+    'enable': ['tcl'],
+    'tcl': {
+        'all': {
+            'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
+        },
+        '=x86-linux': {
+            'environment': {'set': {'FOO': 'foo'},
+                            'unset': ['BAR']}
+        }
+    }
+}
+
+configuration_blacklist = {
+    'enable': ['tcl'],
+    'tcl': {
+        'blacklist': ['callpath'],
+        'all': {
+            'autoload': 'direct'
+        }
+    }
+}
+
+configuration_conflicts = {
+    'enable': ['tcl'],
+    'tcl': {
+        'naming_scheme': '{name}/{version}-{compiler.name}',
+        'all': {
+            'conflict': ['{name}', 'intel/14.0.1']
+        }
+    }
+}
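+
+# With configuration_conflicts above, the generated module file is expected
+# to contain both ``conflict mpileaks`` and ``conflict intel/14.0.1`` lines
+# (exercised by test_conflicts below).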
+
+
+class TclTests(MockPackagesTest):
+    def setUp(self):
+        super(TclTests, self).setUp()
+        self.configuration_obj = spack.modules.CONFIGURATION
+        spack.modules.open = mock_open
+        # Make sure that a non-mocked configuration will trigger an error
+        spack.modules.CONFIGURATION = None
+
+    def tearDown(self):
+        del spack.modules.open
+        spack.modules.CONFIGURATION = self.configuration_obj
+        super(TclTests, self).tearDown()
+
+    def get_modulefile_content(self, spec):
+        spec.concretize()
+        generator = spack.modules.TclModule(spec)
+        generator.write()
+        content = FILE_REGISTRY[generator.file_name].split('\n')
+        return content
+
+    def test_simple_case(self):
+        spack.modules.CONFIGURATION = configuration_autoload_direct
+        spec = spack.spec.Spec('mpich@3.0.4=x86-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertTrue('module-whatis "mpich @3.0.4"' in content)
+
+    def test_autoload(self):
+        spack.modules.CONFIGURATION = configuration_autoload_direct
+        spec = spack.spec.Spec('mpileaks=x86-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
+        self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
+
+        spack.modules.CONFIGURATION = configuration_autoload_all
+        spec = spack.spec.Spec('mpileaks=x86-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
+        self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
+
+    def test_alter_environment(self):
+        spack.modules.CONFIGURATION = configuration_alter_environment
+        spec = spack.spec.Spec('mpileaks=x86-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertEqual(
+            len([x
+                 for x in content
+                 if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
+        self.assertEqual(
+            len([x for x in content if 'setenv FOO "foo"' in x]), 1)
+        self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
+
+        spec = spack.spec.Spec('libdwarf=x64-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertEqual(
+            len([x
+                 for x in content
+                 if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
+        self.assertEqual(
+            len([x for x in content if 'setenv FOO "foo"' in x]), 0)
+        self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0)
+
+    def test_blacklist(self):
+        spack.modules.CONFIGURATION = configuration_blacklist
+        spec = spack.spec.Spec('mpileaks=x86-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
+        self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
+
+    def test_conflicts(self):
+        spack.modules.CONFIGURATION = configuration_conflicts
+        spec = spack.spec.Spec('mpileaks=x86-linux')
+        content = self.get_modulefile_content(spec)
+        self.assertEqual(
+            len([x for x in content if x.startswith('conflict')]), 2)
+        self.assertEqual(
+            len([x for x in content if x == 'conflict mpileaks']), 1)
+        self.assertEqual(
+            len([x for x in content if x == 'conflict intel/14.0.1']), 1)
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index f51f05cad7d27be9545eb989b58fb8cc3eabcace..ad51da9d47fae44b13beae6b855f9f1744a97068 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -206,6 +206,9 @@ def parse_version_offset(path):
         # e.g. lame-398-1
         (r'-((\d)+-\d)', stem),
 
+        # e.g. foobar_1.2-3
+        (r'_((\d+\.)+\d+(-\d+)?[a-z]?)', stem),
+
         # e.g. foobar-4.5.1
         (r'-((\d+\.)*\d+)$', stem),
 
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index fc27b789d062ea8ff2da153fcb6af6d2a018c8c7..25819b6fc75abe4c0ec8fa710feacd2dce34be8a 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -144,7 +144,7 @@ def streamify(arg, mode):
 
         cmd = self.exe + list(args)
 
-        cmd_line = ' '.join(cmd)
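+        # Quote each argument (escaping embedded single quotes) so the logged
+        # command line can be pasted back into a shell verbatim.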
+        cmd_line = "'%s'" % "' '".join(
+            arg.replace("'", "'\"'\"'") for arg in cmd)
         tty.debug(cmd_line)
 
         try:
diff --git a/share/spack/qa/run-flake8 b/share/spack/qa/run-flake8
new file mode 100755
index 0000000000000000000000000000000000000000..722c7fcba6e6247274d5f4db159a358fc01c8d5d
--- /dev/null
+++ b/share/spack/qa/run-flake8
@@ -0,0 +1,55 @@
+#!/bin/bash
+#
+# This script runs source code style checks on Spack.
+#
+# It should be executed from the top-level directory of the repo,
+# e.g.:
+#
+#    share/spack/qa/run-flake8
+#
+# To run it, you'll need to have the Python flake8 installed locally.
+#
+PYTHONPATH=./lib/spack:$PYTHONPATH
+
+flake8="$(which flake8)"
+if [[ ! $flake8 ]]; then
+    echo "ERROR: flake8 is required to run this script."
+    exit 1
+fi
+
+# Check if changed files are flake8 conformant [framework]
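+# ("develop..." is git's three-dot range: it diffs against the merge-base
+# with develop, so only files changed on this branch are listed.)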
+changed=$(git diff --name-only develop... | grep '\.py$')
+
+# Exempt url lines in changed packages from overlong line errors.
+for file in $changed; do
+    if [[ $file = *package.py ]]; then
+        perl -i~ -pe 's/^(\s*url\s*=.*)$/\1  # NOQA: ignore=E501/' $file;
+    fi
+done
+
+return_code=0
+if [[ $changed ]]; then
+    echo =======================================================
+    echo  flake8: running flake8 code checks on spack.
+    echo
+    echo  Modified files:
+    echo  $changed | perl -pe 's/^/  /;s/ +/\n  /g'
+    echo =======================================================
+    if flake8 --format pylint $changed; then
+        echo "Flake8 checks were clean."
+    else
+        echo "Flake8 found errors."
+        return_code=1
+    fi
+else
+    echo No core framework files modified.
+fi
+
+# Restore original package files after modifying them.
+for file in $changed; do
+    if [[ $file = *package.py ]]; then
+        mv "${file}~" "${file}"
+    fi
+done
+
+exit $return_code
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index 11a4c0a70c4d79a7586f978cd8844856be86bba5..dba6f1eff407cc3fdc2a6afbbdf44da1a395ca8b 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -41,7 +41,7 @@
 # commands.  This allows the user to use packages without knowing all
 # their installation details.
 #
-# e.g., rather than requring a full spec for libelf, the user can type:
+# e.g., rather than requiring a full spec for libelf, the user can type:
 #
 #     spack use libelf
 #
@@ -113,11 +113,11 @@ function spack {
                         unuse $_sp_module_args $_sp_full_spec
                     fi ;;
                 "load")
-                    if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then
+                    if _sp_full_spec=$(command spack $_sp_flags module find tcl $_sp_spec); then
                         module load $_sp_module_args $_sp_full_spec
                     fi ;;
                 "unload")
-                    if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then
+                    if _sp_full_spec=$(command spack $_sp_flags module find tcl $_sp_spec); then
                         module unload $_sp_module_args $_sp_full_spec
                     fi ;;
             esac
diff --git a/var/spack/repos/builtin/packages/ImageMagick/package.py b/var/spack/repos/builtin/packages/ImageMagick/package.py
index 753ea80ca6e38c63d5e431247295cceebe180193..3a86d9fb7ca940c98c8c21ada8ca3626cf21acf4 100644
--- a/var/spack/repos/builtin/packages/ImageMagick/package.py
+++ b/var/spack/repos/builtin/packages/ImageMagick/package.py
@@ -1,10 +1,11 @@
 from spack import *
 
+
 class Imagemagick(Package):
     """ImageMagick is a image processing library"""
     homepage = "http://www.imagemagic.org"
 
-    #-------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     # ImageMagick does not keep around anything but *-10 versions, so
     # this URL may change.  If you want the bleeding edge, you can
     # uncomment it and see if it works but you may need to try to
@@ -17,14 +18,16 @@ class Imagemagick(Package):
     # version('6.9.0-6', 'c1bce7396c22995b8bdb56b7797b4a1b',
     # url="http://www.imagemagick.org/download/ImageMagick-6.9.0-6.tar.bz2")
 
-    #-------------------------------------------------------------------------
+    # -------------------------------------------------------------------------
     # *-10 versions are archived, so these versions should fetch reliably.
     # -------------------------------------------------------------------------
-    version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c',
-            url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download")
+    version(
+        '6.8.9-10',
+        'aa050bf9785e571c956c111377bbf57c',
+        url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download")
 
-    depends_on('libtool')
     depends_on('jpeg')
+    depends_on('libtool')
     depends_on('libpng')
     depends_on('freetype')
     depends_on('fontconfig')
@@ -32,6 +35,5 @@ class Imagemagick(Package):
 
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
-
         make()
         make("install")
diff --git a/var/spack/repos/builtin/packages/LuaJIT/package.py b/var/spack/repos/builtin/packages/LuaJIT/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b2a26921212269d564bcbd3a746d2af100bc658
--- /dev/null
+++ b/var/spack/repos/builtin/packages/LuaJIT/package.py
@@ -0,0 +1,15 @@
+import os
+from spack import *
+
+
+class Luajit(Package):
+    """Fast, flexible, JIT-compiled Lua."""
+    homepage = "http://www.luajit.org"
+    url      = "http://luajit.org/download/LuaJIT-2.0.4.tar.gz"
+
+    version('2.0.4', 'dd9c38307f2223a504cbfb96e477eca0')
+
+    def install(self, spec, prefix):
+        # Linking with the C++ compiler is a dirty hack to deal with the fact
+        # that unwinding symbols are not included by libc; on some platforms
+        # this is necessary for the final link stage to work.
+        make("install", "PREFIX=" + prefix, "TARGET_LD=" + os.environ['CXX'])
diff --git a/var/spack/repos/builtin/packages/Mitos/package.py b/var/spack/repos/builtin/packages/Mitos/package.py
index ea131872ddb9021f4583d56721b1ad13d37ea541..ec1d56a5c7b94a52c982e5c540963189ca283817 100644
--- a/var/spack/repos/builtin/packages/Mitos/package.py
+++ b/var/spack/repos/builtin/packages/Mitos/package.py
@@ -1,19 +1,18 @@
 from spack import *
 
+
 class Mitos(Package):
     """Mitos is a library and a tool for collecting sampled memory
     performance data to view with MemAxes"""
 
     homepage = "https://github.com/llnl/Mitos"
-    url      = "https://github.com/llnl/Mitos"
+    url = "https://github.com/llnl/Mitos"
 
     version('0.9.2',
             git='https://github.com/llnl/Mitos.git',
             commit='8cb143a2e8c00353ff531a781a9ca0992b0aaa3d')
 
-    version('0.9.1',
-            git='https://github.com/llnl/Mitos.git',
-            tag='v0.9.1')
+    version('0.9.1', git='https://github.com/llnl/Mitos.git', tag='v0.9.1')
 
     depends_on('dyninst@8.2.1:')
     depends_on('hwloc')
diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py
index 2471dff09b6d24cbbb0d5e8ca267283ffdbe4b41..7c4aa3520c827ac4278774c7d439c350524c073a 100644
--- a/var/spack/repos/builtin/packages/R/package.py
+++ b/var/spack/repos/builtin/packages/R/package.py
@@ -1,4 +1,14 @@
+import functools
+import glob
+import inspect
+import os
+import re
+from contextlib import closing
+
+import spack
+from llnl.util.lang import match_predicate
 from spack import *
+from spack.util.environment import *
 
 
 class R(Package):
@@ -9,6 +19,8 @@ class R(Package):
     """
     homepage = "https://www.r-project.org"
     url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"
+
+    extendable = True
 
     version('3.2.3', '1ba3dac113efab69e706902810cc2970')
     version('3.2.2', '57cef5c2e210a5454da1979562a10e5b')
@@ -38,12 +50,57 @@ class R(Package):
     depends_on('tk')
 
     def install(self, spec, prefix):
+        rlibdir = join_path(prefix, 'rlib')
         options = ['--prefix=%s' % prefix,
+                   '--libdir=%s' % rlibdir,
                    '--enable-R-shlib',
-                   '--enable-BLAS-shlib']
+                   '--enable-BLAS-shlib',
+                   '--enable-R-framework=no']
         if '+external-lapack' in spec:
             options.extend(['--with-blas', '--with-lapack'])
 
         configure(*options)
         make()
         make('install')
+
+    # ========================================================================
+    # Set up environment to make install easy for R extensions.
+    # ========================================================================
+
+    @property
+    def r_lib_dir(self):
+        return os.path.join('rlib', 'R', 'library')
+
+    def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+        # Set R_LIBS to include the library dir for the
+        # extension and any other R extensions it depends on.
+        r_libs_path = []
+        for d in extension_spec.traverse():
+            if d.package.extends(self.spec):
+                r_libs_path.append(os.path.join(d.prefix, self.r_lib_dir))
+
+        r_libs_path = ':'.join(r_libs_path)
+        spack_env.set('R_LIBS', r_libs_path)
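+        # R_LIBS ends up like <ext1>/rlib/R/library:<ext2>/rlib/R/library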
+
+        # For the run-time environment set only the path for extension_spec
+        # and prepend it to R_LIBS.
+        if extension_spec.package.extends(self.spec):
+            run_env.prepend_path('R_LIBS',
+                                 os.path.join(extension_spec.prefix,
+                                              self.r_lib_dir))
+
+
+    def setup_dependent_package(self, module, ext_spec):
+        """
+        Called before R modules' install() methods.
+
+        In most cases, extensions will only need to have one line::
+
+            R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir,
+              '%s' % self.stage.archive_file)
+        """
+        # R extension builds can have a global R executable function
+        module.R = Executable(join_path(self.spec.prefix.bin, 'R'))
+
+        # Add a variable for the library directory
+        module.r_lib_dir = os.path.join(ext_spec.prefix, self.r_lib_dir)
+
+        # Make the site-packages directory for extensions,
+        # if it does not already exist.
+        if ext_spec.package.is_extension:
+            mkdirp(module.r_lib_dir)
diff --git a/var/spack/repos/builtin/packages/antlr/package.py b/var/spack/repos/builtin/packages/antlr/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c7c7e3e850d27626f6dc8829de7c20638799eecd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/antlr/package.py
@@ -0,0 +1,47 @@
+from spack import *
+
+class Antlr(Package):
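+    """ANTLR (ANother Tool for Language Recognition) is a parser generator."""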
+
+    homepage = "http://www.antlr.org"
+    url      = "https://github.com/antlr/antlr/tarball/v2.7.7"
+
+    # NOTE: This requires that a system Java be available.
+    # Spack does not yet know how to install Java compilers
+
+    # Notes from http://nco.sourceforge.net/#bld
+    # The first steps to build (i.e., compile, for the most part) NCO from
+    # source code are to install the pre-requisites: ANTLR version 2.7.7
+    # (like this one not version 3.x or 4.x!) (required for ncap2)... ANTLR
+    # binaries from major distributions are pre-built with the source patch
+    # necessary to allow NCO to link to ANTLR... The ANTLR source file
+    # CharScanner.hpp must include this line: #include <cstring> or else
+    # ncap2 will not compile (this tarball is already patched).
+    version('2.7.7', '914865e853fe8e1e61a9f23d045cb4ab',
+        # Patched version as described above
+        url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz')
+        # Unpatched version
+        # url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz')
+
+    variant('cxx', default=False, description='Enable ANTLR for C++')
+    variant('java', default=False, description='Enable ANTLR for Java')
+    variant('python', default=False, description='Enable ANTLR for Python')
+    variant('csharp', default=False, description='Enable ANTLR for Csharp')
+
+
+    def install(self, spec, prefix):
+        # Check for future enabling of variants
+        for v in ('+java', '+python', '+csharp'):
+            if v in spec:
+                raise RuntimeError(
+                    'Illegal variant %s; for now, Spack only knows how '
+                    'to build antlr or antlr+cxx' % v)
+
+        config_args = [
+            '--prefix=%s' % prefix,
+            '--%s-cxx' % ('enable' if '+cxx' in spec else 'disable'),
+            '--%s-java' % ('enable' if '+java' in spec else 'disable'),
+            '--%s-python' % ('enable' if '+python' in spec else 'disable'),
+            '--%s-csharp' % ('enable' if '+csharp' in spec else 'disable')]
+
+        # which('autoreconf')('-iv')
+        configure(*config_args)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py
index 5274fc018f0b5df8f391204c811d1b6b81a0a43c..57083bb947311ad1df077a5f33cf8044ddd3fa93 100644
--- a/var/spack/repos/builtin/packages/astyle/package.py
+++ b/var/spack/repos/builtin/packages/astyle/package.py
@@ -1,8 +1,11 @@
 from spack import *
-import os
+
 
 class Astyle(Package):
-    """A Free, Fast, and Small Automatic Formatter for C, C++, C++/CLI, Objective-C, C#, and Java Source Code."""
+    """
+    A Free, Fast, and Small Automatic Formatter for C, C++, C++/CLI,
+    Objective-C, C#, and Java Source Code.
+    """
     homepage = "http://astyle.sourceforge.net/"
     url      = "http://downloads.sourceforge.net/project/astyle/astyle/astyle%202.04/astyle_2.04_linux.tar.gz"
 
@@ -11,8 +14,15 @@ class Astyle(Package):
     def install(self, spec, prefix):
 
         with working_dir('src'):
+            # we need to edit the makefile in place to set compiler:
+            make_file = join_path(self.stage.source_path,
+                                  'build', 'gcc', 'Makefile')
+            filter_file(r'^CXX\s*=.*', 'CXX=%s' % spack_cxx, make_file)
+
             make('-f',
-                join_path(self.stage.source_path,'build','clang','Makefile'),
-                parallel=False)
+                 make_file,
+                 parallel=False)
+
             mkdirp(self.prefix.bin)
-            install(join_path(self.stage.source_path, 'src','bin','astyle'), self.prefix.bin)
+            install(join_path(self.stage.source_path, 'src', 'bin', 'astyle'),
+                    self.prefix.bin)
diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py
index b8064093d27b61c3bb343dd7d1653d83f950650f..158d722046781de81224e62879ac6139121a4303 100644
--- a/var/spack/repos/builtin/packages/binutils/package.py
+++ b/var/spack/repos/builtin/packages/binutils/package.py
@@ -29,6 +29,7 @@ def install(self, spec, prefix):
         configure_args = [
             '--prefix=%s' % prefix,
             '--disable-dependency-tracking',
+            '--disable-werror',
             '--enable-interwork',
             '--enable-multilib',
             '--enable-shared',
diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py
index 7c526fb95894c934c122b00f2c366410bd4449f6..9a2ddcbf9692c678c4b971ffb3726613d37fdd50 100644
--- a/var/spack/repos/builtin/packages/bison/package.py
+++ b/var/spack/repos/builtin/packages/bison/package.py
@@ -10,6 +10,8 @@ class Bison(Package):
 
     version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8')
 
+    depends_on("m4")
+
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
 
diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
index 7b07933911a40ac069e9091a5f9a9dcb24d2b039..90789a98f29ee0208c7c53a6d3abad6869d02dd0 100644
--- a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
@@ -1,5 +1,5 @@
 ################################################################################
-# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
 #
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -24,43 +24,83 @@ class CbtfArgonavis(Package):
     homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
 
     # Mirror access template example
-    #url      = "file:/g/g24/jeg/cbtf-argonavis-1.5.tar.gz"
-    #version('1.5', '1f7f6512f55409ed2135cfceabe26b82')
+    #url      = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz"
+    #version('1.6', '0fafa0008478405c2c2319450f174ed4')
 
-    version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-argonavis/cbtf-argonavis')
+    version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-argonavis.git')
 
-    depends_on("cmake@3.0.2:")
+    depends_on("cmake@3.0.2")
+    depends_on("boost@1.50.0:")
     depends_on("papi")
+    depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
     depends_on("cbtf")
     depends_on("cbtf-krell")
-    depends_on("cuda")
+    depends_on("cuda@6.0.37")
+    #depends_on("cuda")
 
     parallel = False
 
+    def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
+        # Set the build-type related options in cmakeOptions, replacing
+        # Spack's standard CMake build-type arguments with the settings
+        # this package expects.
+
+        compile_flags="-O2 -g"
+        BuildTypeOptions = []
+
+        # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+        for word in cmakeOptions[:]:
+            if word.startswith('-DCMAKE_BUILD_TYPE'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_CXX_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_C_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
+                cmakeOptions.remove(word)
+        BuildTypeOptions.extend([
+                 '-DCMAKE_VERBOSE_MAKEFILE=ON',
+                 '-DCMAKE_BUILD_TYPE=None',
+                 '-DCMAKE_CXX_FLAGS=%s'         % compile_flags,
+                 '-DCMAKE_C_FLAGS=%s'           % compile_flags
+        ])
+
+        cmakeOptions.extend(BuildTypeOptions)
+
+
     def install(self, spec, prefix):
 
        # Look for package installation information in the cbtf and cbtf-krell prefixes
        cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
 
-       # FIXME, hard coded for testing purposes, we will alter when the external package feature is available
-       cuda_prefix_path = "/usr/local/cudatoolkit-6.0"
-       cupti_prefix_path = "/usr/local/cudatoolkit-6.0/extras/CUPTI"
-
-
        with working_dir('CUDA'):
          with working_dir('build', create=True):
-           cmake('..',
-                 '-DCMAKE_INSTALL_PREFIX=%s'	% prefix,
-                 '-DCMAKE_LIBRARY_PATH=%s'	% prefix.lib64,
-                 '-DCMAKE_PREFIX_PATH=%s'	% cmake_prefix_path,
-                 '-DCUDA_INSTALL_PATH=%s'	% cuda_prefix_path,
-                 '-DCUDA_ROOT=%s'		% cuda_prefix_path,
-                 '-DCUPTI_ROOT=%s'		% cupti_prefix_path,
-                 '-DCUDA_DIR=%s'                % cuda_prefix_path,
-                 '-DPAPI_ROOT=%s'		% spec['papi'].prefix,
-                 '-DCBTF_PREFIX=%s'		% spec['cbtf'].prefix,
-                 *std_cmake_args)
-           make("clean")
-           make()
-           make("install")
 
+             cmakeOptions = []
+             cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
+                                  '-DCMAKE_PREFIX_PATH=%s'	% cmake_prefix_path,
+                                  '-DCUDA_DIR=%s'               % spec['cuda'].prefix,
+                                  '-DCUDA_INSTALL_PATH=%s'      % spec['cuda'].prefix,
+                                  '-DCUDA_TOOLKIT_ROOT_DIR=%s'  % spec['cuda'].prefix,
+                                  '-DCUPTI_DIR=%s'		% join_path(spec['cuda'].prefix + '/extras/CUPTI'),
+                                  '-DCUPTI_ROOT=%s'		% join_path(spec['cuda'].prefix + '/extras/CUPTI'),
+                                  '-DPAPI_ROOT=%s'		% spec['papi'].prefix,
+                                  '-DCBTF_DIR=%s'		% spec['cbtf'].prefix,
+                                  '-DCBTF_KRELL_DIR=%s'		% spec['cbtf-krell'].prefix,
+                                  '-DBOOST_ROOT=%s'             % spec['boost'].prefix,
+                                  '-DBoost_DIR=%s'		% spec['boost'].prefix,
+                                  '-DBOOST_LIBRARYDIR=%s'	% spec['boost'].prefix.lib,
+                                  '-DMRNET_DIR=%s'		% spec['mrnet'].prefix,
+                                  '-DBoost_NO_SYSTEM_PATHS=ON'
+                                 ])
+
+             # Add in the standard cmake arguments
+             cmakeOptions.extend(std_cmake_args)
+
+             # Adjust the standard cmake arguments to the build type and
+             # flags that this package expects.
+             self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+             # Invoke cmake
+             cmake('..', *cmakeOptions)
+
+             make("clean")
+             make()
+             make("install")
diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py
index 9458ac113c9b7117c7e82081391827d663530538..e6050cb4a919127c4a8f563686d8bc391aa00365 100644
--- a/var/spack/repos/builtin/packages/cbtf-krell/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py
@@ -1,5 +1,5 @@
 ################################################################################
-# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
 #
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -26,21 +26,30 @@ class CbtfKrell(Package):
     homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
 
     # optional mirror access template
-    #url      = "file:/g/g24/jeg/cbtf-krell-1.5.tar.gz"
-    #version('1.5', 'b13f6df6a93c44149d977773dd776d2f')
+    #url      = "file:/home/jeg/cbtf-krell-1.6.tar.gz"
+    #version('1.6', 'edeb61cd488f16e7b124f77db9ce762d')
 
-    version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-krell/cbtf-krell')
+    version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-krell.git')
 
+    # MPI variants
+    variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.")
+    variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.")
+    variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.")
+    variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.")
+    variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.")
+    variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.")
 
     # Dependencies for cbtf-krell
+    depends_on("cmake@3.0.2")
 
     # For binutils service
     depends_on("binutils@2.24+krellpatch")
 
     # collectionTool
-    depends_on("boost@1.50.0")
-    depends_on("dyninst@8.2.1")
-    depends_on("mrnet@4.1.0:+lwthreads")
+    depends_on("boost@1.50.0:")
+    depends_on("dyninst@8.2.1:")
+    depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
+
     depends_on("xerces-c@3.1.1:")
     depends_on("cbtf")
 
@@ -51,66 +60,207 @@ class CbtfKrell(Package):
 
     # MPI Installations
     # These have not worked either for build or execution, commenting out for now
-    #depends_on("openmpi")
-    #depends_on("mvapich2@2.0")
-    #depends_on("mpich")
+    depends_on("openmpi", when='+openmpi')
+    depends_on("mpich", when='+mpich')
+    depends_on("mpich2", when='+mpich2')
+    depends_on("mvapich2", when='+mvapich2')
+    depends_on("mvapich", when='+mvapich')
+    depends_on("mpt", when='+mpt')
 
     parallel = False
 
+    def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
+        # Set the build-type related options in cmakeOptions, replacing
+        # Spack's standard CMake build-type arguments with the settings
+        # cbtf-krell expects.
+
+        compile_flags="-O2 -g"
+        BuildTypeOptions = []
+        # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+        for word in cmakeOptions[:]:
+            if word.startswith('-DCMAKE_BUILD_TYPE'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_CXX_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_C_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
+                cmakeOptions.remove(word)
+        BuildTypeOptions.extend([
+                 '-DCMAKE_VERBOSE_MAKEFILE=ON',
+                 '-DCMAKE_BUILD_TYPE=None',
+                 '-DCMAKE_CXX_FLAGS=%s'         % compile_flags,
+                 '-DCMAKE_C_FLAGS=%s'           % compile_flags
+        ])
+
+        cmakeOptions.extend(BuildTypeOptions)
+
+
+
+    def set_mpi_cmakeOptions(self, spec, cmakeOptions):
+        # Append to cmakeOptions the options that enable the MPI
+        # implementations selected through the variants.
+
+        MPIOptions = []
+
+        # openmpi
+        if '+openmpi' in spec:
+            MPIOptions.extend([
+                 '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix
+            ])
+        # mpich
+        if '+mpich' in spec:
+            MPIOptions.extend([
+                 '-DMPICH_DIR=%s' % spec['mpich'].prefix
+            ])
+        # mpich2
+        if '+mpich2' in spec:
+            MPIOptions.extend([
+                 '-DMPICH2_DIR=%s' % spec['mpich2'].prefix
+            ])
+        # mvapich
+        if '+mvapich' in spec:
+            MPIOptions.extend([
+                 '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix
+            ])
+        # mvapich2
+        if '+mvapich2' in spec:
+            MPIOptions.extend([
+                 '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix
+            ])
+        # mpt
+        if '+mpt' in spec:
+            MPIOptions.extend([
+                 '-DMPT_DIR=%s' % spec['mpt'].prefix
+            ])
+
+        cmakeOptions.extend(MPIOptions)
+
     def install(self, spec, prefix):
 
         # Add in paths for finding package config files that tell us where to find these packages
-        cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix)
-
-        # FIXME - hard code path until external package support is available
-        # Need to change this path and/or add additional paths for MPI experiment support on different platforms
-        #openmpi_prefix_path = "/opt/openmpi-1.8.2"
-        #mvapich_prefix_path = "/usr/local/tools/mvapich-gnu"
-
-        # Other possibilities, they will need a -DMVAPICH_DIR=, etc clause in the cmake command to be recognized
-        # mvapich_prefix_path = "<mvapich install path>"
-        # mvapich2_prefix_path = "<mvapich2 install path>"
-        # mpich2_prefix_path = "<mpich2 install path>"
-        # mpich_prefix_path = "<mpich install path>"
-        # mpt_prefix_path = "<mpt install path>"
-
-        # Add in paths for cuda if requested via the cuda variant
-        # FIXME - hard code path until external package support is available
-        #if '+cuda' in spec:
-        #    cuda_prefix_path = "/usr/local/cuda-6.0"
-        #    cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI"
-        #else:
-        #    cuda_prefix_path = ""
-        #    cupti_prefix_path = ""
-
-        #'-DMVAPICH2_DIR=%s'           % spec['mvapich2'].prefix,
-        #'-DOPENMPI_DIR=%s'            % spec['openmpi'].prefix,
-        #'-DMPICH_DIR=%s'              % spec['mpich'].prefix,
-        #'-DCMAKE_LIBRARY_PATH=%s'	% prefix.lib64,
-        #'-DOPENMPI_DIR=%s'            % openmpi_prefix_path,
-        #'-DMVAPICH_DIR=%s'            % mvapich_prefix_path,
-        #'-DLIB_SUFFIX=64',
-        #'-DCUDA_DIR=%s'               % cuda_prefix_path,
-        #'-DCUPTI_DIR=%s'              % cupti_prefix_path,
+        # cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix)
+        # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path
 
         # Build cbtf-krell with cmake 
         with working_dir('build_cbtf_krell', create=True):
-            cmake('..',
-                  '-DCMAKE_BUILD_TYPE=Debug',
-                  '-DCMAKE_INSTALL_PREFIX=%s'	% prefix,
-                  '-DCBTF_DIR=%s'		% spec['cbtf'].prefix,
-                  '-DBINUTILS_DIR=%s'           % spec['binutils'].prefix,
-                  '-DLIBMONITOR_DIR=%s'         % spec['libmonitor'].prefix,
-                  '-DLIBUNWIND_DIR=%s'          % spec['libunwind'].prefix,
-                  '-DPAPI_DIR=%s'               % spec['papi'].prefix,
-                  '-DBOOST_DIR=%s'              % spec['boost'].prefix,
-                  '-DMRNET_DIR=%s'              % spec['mrnet'].prefix,
-                  '-DDYNINST_DIR=%s'		% spec['dyninst'].prefix,
-                  '-DXERCESC_DIR=%s'            % spec['xerces-c'].prefix,
-                  '-DCMAKE_PREFIX_PATH=%s'	% cmake_prefix_path,
-                  *std_cmake_args)
+            cmakeOptions = []
+            cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'	% prefix,
+                                 '-DCBTF_DIR=%s' 		% spec['cbtf'].prefix,
+                                 '-DBINUTILS_DIR=%s'		% spec['binutils'].prefix,
+                                 '-DLIBMONITOR_DIR=%s'		% spec['libmonitor'].prefix,
+                                 '-DLIBUNWIND_DIR=%s'		% spec['libunwind'].prefix,
+                                 '-DPAPI_DIR=%s'		% spec['papi'].prefix,
+                                 '-DBOOST_DIR=%s'		% spec['boost'].prefix,
+                                 '-DMRNET_DIR=%s'		% spec['mrnet'].prefix,
+                                 '-DDYNINST_DIR=%s'		% spec['dyninst'].prefix,
+                                 '-DXERCESC_DIR=%s'		% spec['xerces-c'].prefix
+                                ])
+
+
+            # Add any MPI implementations coming from variant settings
+            self.set_mpi_cmakeOptions(spec, cmakeOptions)
+
+            # Add in the standard cmake arguments
+            cmakeOptions.extend(std_cmake_args)
+
+            # Adjust the standard cmake arguments to the build type, flags,
+            # and verbosity that cbtf-krell expects
+            self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+            # Invoke cmake
+            cmake('..', *cmakeOptions)
 
             make("clean")
             make()
             make("install")
 
+
+
+        #if '+cray' in spec:
+	#if 'cray' in self.spec.architecture:
+        #    if '+runtime' in spec:
+        #        with working_dir('build_cbtf_cray_runtime', create=True):
+        #            python_vers='%d.%d' % spec['python'].version[:2]
+        #            cmake .. \
+        #                    -DCMAKE_BUILD_TYPE=Debug \
+        #                    -DTARGET_OS="cray" \
+        #                    -DRUNTIME_ONLY="true" \
+        #                    -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
+        #                    -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
+        #                    -DCBTF_DIR=${CBTF_ROOT} \
+        #                    -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \
+        #                    -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \
+        #                    -DBINUTILS_DIR=${KRELL_ROOT} \
+        #                    -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \
+        #                    -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \
+        #                    -DPAPI_DIR=${PAPI_ROOT} \
+        #                    -DDYNINST_DIR=${DYNINST_CN_ROOT} \
+        #                    -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \
+        #                    -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
+        #    else:
+        #        with working_dir('build_cbtf_cray_frontend', create=True):
+        #            python_vers='%d.%d' % spec['python'].version[:2]
+        #            cmake .. \
+        #                    -DCMAKE_BUILD_TYPE=Debug \
+        #                    -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
+        #                    -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
+        #                    -DCBTF_DIR=${CBTF_ROOT} \
+        #                    -DRUNTIME_TARGET_OS="cray" \
+        #                    -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \
+        #                    -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \
+        #                    -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \
+        #                    -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \
+        #                    -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \
+        #                    -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \
+        #                    -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \
+        #                    -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \
+        #                    -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \
+        #                    -DBOOST_ROOT=/${KRELL_ROOT} \
+        #                    -DXERCESC_DIR=/${KRELL_ROOT} \
+        #                    -DBINUTILS_DIR=/${KRELL_ROOT} \
+        #                    -DLIBMONITOR_DIR=${KRELL_ROOT} \
+        #                    -DLIBUNWIND_DIR=${KRELL_ROOT} \
+        #                    -DPAPI_DIR=${PAPI_ROOT} \
+        #                    -DDYNINST_DIR=${KRELL_ROOT} \
+        #                    -DMRNET_DIR=${KRELL_ROOT} \
+        #                    -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
+        #    fi
+#
+#                    make("clean")
+#                    make()
+#                    make("install")
+#
+#        elif '+mic' in spec:
+#            if '+runtime' in spec:
+#                with working_dir('build_cbtf_mic_runtime', create=True):
+#                    python_vers='%d.%d' % spec['python'].version[:2]
+#                    cmake .. \
+#
+#            else:
+#                with working_dir('build_cbtf_cray_frontend', create=True):
+#                    python_vers='%d.%d' % spec['python'].version[:2]
+#                    cmake .. \
+#            fi
+#
+#        else:
+#            # Build cbtf-krell with cmake 
+#            with working_dir('build_cbtf_krell', create=True):
+#                cmake('..',
+#                      '-DCMAKE_BUILD_TYPE=Debug',
+#                      '-DCMAKE_INSTALL_PREFIX=%s'	% prefix,
+#                      '-DCBTF_DIR=%s' 			% spec['cbtf'].prefix,
+#                      '-DBINUTILS_DIR=%s'		% spec['binutils'].prefix,
+#                      '-DLIBMONITOR_DIR=%s'		% spec['libmonitor'].prefix,
+#                      '-DLIBUNWIND_DIR=%s'		% spec['libunwind'].prefix,
+#                      '-DPAPI_DIR=%s'			% spec['papi'].prefix,
+#                      '-DBOOST_DIR=%s'			% spec['boost'].prefix,
+#                      '-DMRNET_DIR=%s'			% spec['mrnet'].prefix,
+#                      '-DDYNINST_DIR=%s'		% spec['dyninst'].prefix,
+#                      '-DXERCESC_DIR=%s'		% spec['xerces-c'].prefix,
+#                      '-DOPENMPI_DIR=%s'		% openmpi_prefix_path,
+#                      '-DCMAKE_PREFIX_PATH=%s'		% cmake_prefix_path,
+#                      *std_cmake_args)
+#
+#                make("clean")
+#                make()
+#                make("install")
+#
+#        fi
+#
diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
index 2da9e8a1f74bb45336289e9445b705658c5be30e..5ca88601f3d0bc531568afd289e1ad6d44f0f164 100644
--- a/var/spack/repos/builtin/packages/cbtf-lanl/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
@@ -1,5 +1,5 @@
 ################################################################################
-# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
 #
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -29,32 +29,65 @@ class CbtfLanl(Package):
 
     version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl')
 
-
+    depends_on("cmake@3.0.2")
     # Dependencies for cbtf-krell
-    depends_on("boost@1.50")
-    depends_on("mrnet@4.1.0:+lwthreads")
+    depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
     depends_on("xerces-c@3.1.1:")
     depends_on("cbtf")
     depends_on("cbtf-krell")
 
     parallel = False
 
+    def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
+        # Replace the build-type related entries in cmakeOptions with the
+        # build type settings this package expects.
+
+        compile_flags="-O2 -g"
+        BuildTypeOptions = []
+        # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+        for word in cmakeOptions[:]:
+            if word.startswith('-DCMAKE_BUILD_TYPE'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_CXX_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_C_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
+                cmakeOptions.remove(word)
+        BuildTypeOptions.extend([
+                 '-DCMAKE_VERBOSE_MAKEFILE=ON',
+                 '-DCMAKE_BUILD_TYPE=None',
+                 '-DCMAKE_CXX_FLAGS=%s'         % compile_flags,
+                 '-DCMAKE_C_FLAGS=%s'           % compile_flags
+        ])
+
+        cmakeOptions.extend(BuildTypeOptions)
+
     def install(self, spec, prefix):
 
      # Add in paths for finding package config files that tell us where to find these packages
      cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
 
      with working_dir('build', create=True):
-          cmake('..',
-                '-DCBTF_DIR=%s'            % spec['cbtf'].prefix,
-                '-DCBTF_KRELL_DIR=%s'      % spec['cbtf-krell'].prefix,
-                '-DMRNET_DIR=%s'           % spec['mrnet'].prefix,
-                '-DXERCESC_DIR=%s'         % spec['xerces-c'].prefix,
-                '-DCMAKE_PREFIX_PATH=%s'   % cmake_prefix_path,
-                '-DCMAKE_MODULE_PATH=%s'   % join_path(prefix.share,'KrellInstitute','cmake'),
-                *std_cmake_args)
-
-          make("clean")
-          make()
-          make("install")
+         cmakeOptions = []
+         cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
+                              '-DCBTF_DIR=%s'               % spec['cbtf'].prefix,
+                              '-DCBTF_KRELL_DIR=%s'         % spec['cbtf-krell'].prefix,
+                              '-DMRNET_DIR=%s'              % spec['mrnet'].prefix,
+                              '-DXERCESC_DIR=%s'            % spec['xerces-c'].prefix,
+                              '-DCMAKE_PREFIX_PATH=%s'      % cmake_prefix_path,
+                              '-DCMAKE_MODULE_PATH=%s'      % join_path(prefix.share,'KrellInstitute','cmake')
+                             ])
+
+         # Add in the standard cmake arguments
+         cmakeOptions.extend(std_cmake_args)
+
+         # Adjust the standard cmake arguments to the build type, flags, and
+         # verbosity this package expects
+         self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+         # Invoke cmake
+         cmake('..', *cmakeOptions)
+
+         make("clean")
+         make()
+         make("install")
 
diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py
index 52e6a07020a532c77b7a721998c2a442cf3eb50c..7ce1cd382bad04bec7a4fa41d00c5d4667cad909 100644
--- a/var/spack/repos/builtin/packages/cbtf/package.py
+++ b/var/spack/repos/builtin/packages/cbtf/package.py
@@ -1,5 +1,5 @@
 ################################################################################
-# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
 #
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -25,21 +25,44 @@ class Cbtf(Package):
     homepage = "http://sourceforge.net/p/cbtf/wiki/Home"
 
     # Mirror access template example
-    #url      = "file:/g/g24/jeg/cbtf-1.5.tar.gz"
-    #version('1.6', '1ca88a8834759c4c74452cb97fe7b70a')
+    #url      = "file:/home/jeg/cbtf-1.6.tar.gz"
+    #version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987')
 
     # Use when the git repository is available
-    version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf/cbtf')
+    version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf.git')
 
-    depends_on("cmake")
-    #depends_on("boost@1.42.0:")
-    depends_on("boost@1.50.0")
-    depends_on("mrnet@4.1.0+lwthreads")
+    variant('runtime', default=False, description="build only the runtime libraries and collectors.")
+
+    depends_on("cmake@3.0.2")
+    depends_on("boost@1.50.0:")
+    depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
     depends_on("xerces-c@3.1.1:")
-    depends_on("libxml2")
+    # Workaround for a Spack libxml2 package bug; drop the python variant when it is fixed
+    depends_on("libxml2+python")
 
     parallel = False
 
+    def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
+        # Replace the build-type related entries in cmakeOptions with the
+        # build type settings this package expects.
+
+        compile_flags="-O2 -g"
+        BuildTypeOptions = []
+        # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+        for word in cmakeOptions[:]:
+            if word.startswith('-DCMAKE_BUILD_TYPE'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_CXX_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_C_FLAGS'):
+                cmakeOptions.remove(word)
+        BuildTypeOptions.extend([
+                 '-DCMAKE_BUILD_TYPE=None',
+                 '-DCMAKE_CXX_FLAGS=%s'         % compile_flags,
+                 '-DCMAKE_C_FLAGS=%s'           % compile_flags
+        ])
+
+        cmakeOptions.extend(BuildTypeOptions)
+
     def install(self, spec, prefix):
       with working_dir('build', create=True):
 
@@ -48,14 +71,45 @@ def install(self, spec, prefix):
           # or BOOST_INCLUDEDIR).  Useful when specifying BOOST_ROOT. 
           # Defaults to OFF.
 
-          cmake('..',
-                '--debug-output',
-                '-DBoost_NO_SYSTEM_PATHS=TRUE',
-                '-DXERCESC_DIR=%s'         % spec['xerces-c'].prefix,
-                '-DBOOST_ROOT=%s'          % spec['boost'].prefix,
-                '-DMRNET_DIR=%s'           % spec['mrnet'].prefix,
-                '-DCMAKE_MODULE_PATH=%s'   % join_path(prefix.share,'KrellInstitute','cmake'),
-                *std_cmake_args)
+          if '+runtime' in spec:
+              # Install message tag include file for use in Intel MIC cbtf-krell build
+              # FIXME
+              cmakeOptions = []
+              cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'	% prefix,
+                                   '-DBoost_NO_SYSTEM_PATHS=TRUE',
+                                   '-DXERCESC_DIR=%s'         % spec['xerces-c'].prefix,
+                                   '-DBOOST_ROOT=%s'          % spec['boost'].prefix,
+                                   '-DMRNET_DIR=%s'           % spec['mrnet'].prefix,
+                                   '-DCMAKE_MODULE_PATH=%s'   % join_path(prefix.share,'KrellInstitute','cmake')
+                                  ])
+
+              # Add in the standard cmake arguments
+              cmakeOptions.extend(std_cmake_args)
+
+              # Adjust the standard cmake arguments to the build type, flags,
+              # and verbosity this package expects
+              self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+              # Invoke cmake
+              cmake('..', *cmakeOptions)
+
+          else:
+              cmakeOptions = []
+              cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'	% prefix,
+                                   '-DBoost_NO_SYSTEM_PATHS=TRUE',
+                                   '-DXERCESC_DIR=%s'         % spec['xerces-c'].prefix,
+                                   '-DBOOST_ROOT=%s'          % spec['boost'].prefix,
+                                   '-DMRNET_DIR=%s'           % spec['mrnet'].prefix,
+                                   '-DCMAKE_MODULE_PATH=%s'   % join_path(prefix.share,'KrellInstitute','cmake')
+                                  ])
+
+              # Add in the standard cmake arguments
+              cmakeOptions.extend(std_cmake_args)
+
+              # Adjust the standard cmake arguments to the build type, flags,
+              # and verbosity this package expects
+              self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+              # Invoke cmake
+              cmake('..', *cmakeOptions)
 
           make("clean")
           make()
diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py
index bc83cb2b651aaa953160bb7c1ce7b833a4a3a84d..c2778e14da3fdb363c66a6b4ce81fab20615f315 100644
--- a/var/spack/repos/builtin/packages/cryptopp/package.py
+++ b/var/spack/repos/builtin/packages/cryptopp/package.py
@@ -13,6 +13,7 @@ class Cryptopp(Package):
 
     version('5.6.3', '3c5b70e2ec98b7a24988734446242d07')
     version('5.6.2', '7ed022585698df48e65ce9218f6c6a67')
+    version('5.6.1', '96cbeba0907562b077e26bcffb483828')
 
     def install(self, spec, prefix):
         make()
diff --git a/var/spack/repos/builtin/packages/fftw/package.py b/var/spack/repos/builtin/packages/fftw/package.py
index bc129aaf1a16f849a917e8c55ad34f96abfbf18b..4ffc7875944ab70693d6aebf5b7474190533814f 100644
--- a/var/spack/repos/builtin/packages/fftw/package.py
+++ b/var/spack/repos/builtin/packages/fftw/package.py
@@ -42,7 +42,7 @@ class Fftw(Package):
     variant('float', default=True, description='Produces a single precision version of the library')
     variant('long_double', default=True, description='Produces a long double precision version of the library')
     variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)')
-
+    variant('openmp', default=False, description="Enable OpenMP support.")
     variant('mpi', default=False, description='Activate MPI support')
 
     depends_on('mpi', when='+mpi')
@@ -52,8 +52,15 @@ class Fftw(Package):
     def install(self, spec, prefix):
         options = ['--prefix=%s' % prefix,
                    '--enable-shared',
-                   '--enable-threads',
-                   '--enable-openmp']
+                   '--enable-threads']
+        # Add support for OpenMP
+        if '+openmp' in spec:
+            # Note: Apple's Clang does not support OpenMP.
+            if spec.satisfies('%clang'):
+                # Apple's Clang is identified by the '-apple' suffix on the
+                # compiler version.
+                ver = str(self.compiler.version)
+                if ver.endswith('-apple'):
+                    raise InstallError("Apple's clang does not support OpenMP")
+            options.append('--enable-openmp')
         if not self.compiler.f77 or not self.compiler.fc:
             options.append("--disable-fortran")
         if '+mpi' in spec:
diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py
index b346fe80c2842d064a584ff83bb8f5978eadcebd..0e9e8fc099cda417966d066e80c22bd2aba5bb2f 100644
--- a/var/spack/repos/builtin/packages/gdb/package.py
+++ b/var/spack/repos/builtin/packages/gdb/package.py
@@ -34,6 +34,7 @@ class Gdb(Package):
     homepage = "https://www.gnu.org/software/gdb"
     url = "http://ftp.gnu.org/gnu/gdb/gdb-7.10.tar.gz"
 
+    version('7.11', 'f585059252836a981ea5db9a5f8ce97f')
     version('7.10.1', 'b93a2721393e5fa226375b42d567d90b')
     version('7.10', 'fa6827ad0fd2be1daa418abb11a54d86')
     version('7.9.1', 'f3b97de919a9dba84490b2e076ec4cb0')
diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py
index 67ead5f9416b8ece41d191c6cc799232f5f8fcda..a3fc3f79ebe9d6ea095336b366fdb716c050985b 100644
--- a/var/spack/repos/builtin/packages/glib/package.py
+++ b/var/spack/repos/builtin/packages/glib/package.py
@@ -1,4 +1,5 @@
 from spack import *
+import sys
 
 class Glib(Package):
     """The GLib package contains a low-level libraries useful for
@@ -12,6 +13,8 @@ class Glib(Package):
 
     depends_on("libffi")
     depends_on("zlib")
+    depends_on("pkg-config")
+    depends_on('gettext', when=sys.platform == 'darwin')
 
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py
index 9d759303cbb0301fe83b04ae97d9c1eec1b7de89..5f659c56dfcc861c2888b256a62cf4fe24857ecb 100644
--- a/var/spack/repos/builtin/packages/gmsh/package.py
+++ b/var/spack/repos/builtin/packages/gmsh/package.py
@@ -63,6 +63,11 @@ def install(self, spec, prefix):
         build_directory = join_path(self.stage.path, 'spack-build')
         source_directory = self.stage.source_path
 
+        options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
+
+        # Prevent Gmsh from using its own non-standard install directory structure on OS X
+        options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF')
+
         if '+shared' in spec:
             options.extend(['-DENABLE_BUILD_SHARED:BOOL=ON',
                             '-DENABLE_BUILD_DYNAMIC:BOOL=ON'])  # Builds dynamic executable and installs shared library
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
index 470969832f19fe2f1335d49938aa25843e41f642..cae46d3301758c779e028cfe9d1a6fb1f68db36c 100644
--- a/var/spack/repos/builtin/packages/hdf5/package.py
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -24,6 +24,7 @@
 ##############################################################################
 
 from spack import *
+import shutil
 
 
 class Hdf5(Package):
@@ -114,14 +115,16 @@ def install(self, spec, prefix):
             # this is not actually a problem.
             extra_args.extend([
                 "--enable-parallel",
-                "CC=%s" % spec['mpi'].prefix.bin + "/mpicc",
+                "CC=%s" % join_path(spec['mpi'].prefix.bin, "mpicc"),
             ])
 
             if '+cxx' in spec:
-                extra_args.append("CXX=%s" % spec['mpi'].prefix.bin + "/mpic++")
+                extra_args.append("CXX=%s" % join_path(spec['mpi'].prefix.bin,
+                                                       "mpic++"))
 
             if '+fortran' in spec:
-                extra_args.append("FC=%s" % spec['mpi'].prefix.bin + "/mpifort")
+                extra_args.append("FC=%s" % join_path(spec['mpi'].prefix.bin,
+                                                      "mpifort"))
 
         if '+szip' in spec:
             extra_args.append("--with-szlib=%s" % spec['szip'].prefix)
@@ -138,6 +141,58 @@ def install(self, spec, prefix):
             *extra_args)
         make()
         make("install")
+        self.check_install(spec)
+
+    def check_install(self, spec):
+        "Build and run a small program to test the installed HDF5 library"
+        print "Checking HDF5 installation..."
+        checkdir = "spack-check"
+        with working_dir(checkdir, create=True):
+            source = r"""
+#include <hdf5.h>
+#include <assert.h>
+#include <stdio.h>
+int main(int argc, char **argv) {
+  unsigned majnum, minnum, relnum;
+  herr_t herr = H5get_libversion(&majnum, &minnum, &relnum);
+  assert(!herr);
+  printf("HDF5 version %d.%d.%d %u.%u.%u\n", H5_VERS_MAJOR, H5_VERS_MINOR,
+         H5_VERS_RELEASE, majnum, minnum, relnum);
+  return 0;
+}
+"""
+            expected = """\
+HDF5 version {version} {version}
+""".format(version=str(spec.version))
+            with open("check.c", 'w') as f:
+                f.write(source)
+            if '+mpi' in spec:
+                cc = which(join_path(spec['mpi'].prefix.bin, "mpicc"))
+            else:
+                cc = which('cc')
+            # TODO: Automate these path and library settings
+            cc('-c', "-I%s" % join_path(spec.prefix, "include"), "check.c")
+            cc('-o', "check", "check.o",
+               "-L%s" % join_path(spec.prefix, "lib"), "-lhdf5",
+               "-lz")
+            try:
+                check = Executable('./check')
+                output = check(return_output=True)
+            except:
+                output = ""
+            success = output == expected
+            if not success:
+                print "Produced output does not match expected output."
+                print "Expected output:"
+                print '-'*80
+                print expected
+                print '-'*80
+                print "Produced output:"
+                print '-'*80
+                print output
+                print '-'*80
+                raise RuntimeError("HDF5 install check failed")
+        shutil.rmtree(checkdir)
 
     def url_for_version(self, version):
         v = str(version)
diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py
index ab7205646ee18e2a99cdd37ed317a009b3c6381f..a461a7482caf1fe29447e447c61f6af25d3c7d5e 100644
--- a/var/spack/repos/builtin/packages/hwloc/package.py
+++ b/var/spack/repos/builtin/packages/hwloc/package.py
@@ -17,6 +17,7 @@ class Hwloc(Package):
     list_url = "http://www.open-mpi.org/software/hwloc/"
     list_depth = 3
 
+    version('1.11.3', 'c1d36a9de6028eac1d18ea4782ef958f')
     version('1.11.2', 'e4ca55c2a5c5656da4a4e37c8fc51b23')
     version('1.11.1', 'feb4e416a1b25963ed565d8b42252fdc')
     version('1.9',    '1f9f9155682fe8946a97c08896109508')
diff --git a/var/spack/repos/builtin/packages/jpeg/package.py b/var/spack/repos/builtin/packages/jpeg/package.py
index 87820467dba31a9dd5e9250b92e9704315e5b829..2f15e59ad4e7d1272449e8c3fc2da7e557cf293a 100644
--- a/var/spack/repos/builtin/packages/jpeg/package.py
+++ b/var/spack/repos/builtin/packages/jpeg/package.py
@@ -1,14 +1,19 @@
 from spack import *
 
 class Jpeg(Package):
-    """jpeg library"""
+    """libjpeg is a widely used free library with functions for handling the
+    JPEG image data format. It implements a JPEG codec (encoding and decoding)
+    alongside various utilities for handling JPEG data."""
+
     homepage = "http://www.ijg.org"
-    url      = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz"
+    url      = "http://www.ijg.org/files/jpegsrc.v9b.tar.gz"
 
+    version('9b', '6a9996ce116ec5c52b4870dbcd6d3ddb')
     version('9a', '3353992aecaee1805ef4109aadd433e7')
 
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
 
         make()
+        make("test")
         make("install")
diff --git a/var/spack/repos/builtin/packages/kripke/package.py b/var/spack/repos/builtin/packages/kripke/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d067ea44d69969aecd5e8de9aba0fe62d43080c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/kripke/package.py
@@ -0,0 +1,32 @@
+from spack import *
+
+class Kripke(Package):
+    """Kripke is a simple, scalable, 3D Sn deterministic particle
+       transport proxy/mini app.
+    """
+    homepage = "https://codesign.llnl.gov/kripke.php"
+    url      = "https://codesign.llnl.gov/downloads/kripke-openmp-1.1.tar.gz"
+
+    version('1.1', '7fe6f2b26ed983a6ce5495ab701f85bf')
+
+    variant('mpi',    default=True, description='Build with MPI.')
+    variant('openmp', default=True, description='Build with OpenMP enabled.')
+
+    depends_on('mpi', when="+mpi")
+
+    def install(self, spec, prefix):
+        with working_dir('build', create=True):
+            def enabled(variant):
+                return (1 if variant in spec else 0)
+
+            cmake('-DCMAKE_INSTALL_PREFIX:PATH=.',
+                  '-DENABLE_OPENMP=%d' % enabled('+openmp'),
+                  '-DENABLE_MPI=%d' % enabled('+mpi'),
+                  '..',
+                  *std_cmake_args)
+            make()
+
+            # Kripke does not provide install target, so we have to copy
+            # things into place.
+            mkdirp(prefix.bin)
+            install('kripke', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/libtermkey/package.py b/var/spack/repos/builtin/packages/libtermkey/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..7f25edaf76c6c2ccf599cb1b41844f1c288794b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libtermkey/package.py
@@ -0,0 +1,17 @@
+from spack import *
+
+class Libtermkey(Package):
+    """Easy keyboard entry processing for terminal programs"""
+    homepage = "http://www.leonerd.org.uk/code/libtermkey/"
+    url      = "http://www.leonerd.org.uk/code/libtermkey/libtermkey-0.18.tar.gz"
+
+    version('0.18',  '3be2e3e5a851a49cc5e8567ac108b520')
+    version('0.17',  '20edb99e0d95ec1690fe90e6a555ae6d')
+    version('0.16',  '7a24b675aaeb142d30db28e7554987d4')
+    version('0.15b', '27689756e6c86c56ae454f2ac259bc3d')
+    version('0.14',  'e08ce30f440f9715c459060e0e048978')
+
+
+    def install(self, spec, prefix):
+        make()
+        make("install", "PREFIX=" + prefix)
diff --git a/var/spack/repos/builtin/packages/libuv/package.py b/var/spack/repos/builtin/packages/libuv/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..eace94d1a68bf81b33d452f4bf04f7fbc80524a1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libuv/package.py
@@ -0,0 +1,21 @@
+from spack import *
+
+class Libuv(Package):
+    """Multi-platform library with a focus on asynchronous IO"""
+    homepage = "http://libuv.org"
+    url      = "https://github.com/libuv/libuv/archive/v1.9.0.tar.gz"
+
+    version('1.9.0', '14737f9c76123a19a290dabb7d1cd04c')
+
+    depends_on('automake')
+    depends_on('autoconf')
+    depends_on('libtool')
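+    # The GitHub source archive does not include a configure script; it is
+    # generated with autogen.sh, hence the autotools dependencies above.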
+
+    def install(self, spec, prefix):
+        bash = which("bash")
+        bash('autogen.sh')
+        configure('--prefix=%s' % prefix)
+
+        make()
+        make("check")
+        make("install")
diff --git a/var/spack/repos/builtin/packages/libvterm/package.py b/var/spack/repos/builtin/packages/libvterm/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..3212f6550d25289d7c86446c1b4414923ddb3243
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libvterm/package.py
@@ -0,0 +1,12 @@
+from spack import *
+
+class Libvterm(Package):
+    """An abstract library implementation of a terminal emulator"""
+    homepage = "http://www.leonerd.org.uk/code/libvterm/"
+    url      = "http://www.leonerd.org.uk/code/libvterm/libvterm-0+bzr681.tar.gz"
+
+    version('681', '7a4325a7350b7092245c04e8ee185ac3')
+
+    def install(self, spec, prefix):
+        make()
+        make("install", "PREFIX=" + prefix)
diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py
index d7d94c4546e33a3dd6f017b2c45b1ae8c6f8822f..b2543be5da7dca8e963aa90c504213f4cee61ad7 100644
--- a/var/spack/repos/builtin/packages/libxcb/package.py
+++ b/var/spack/repos/builtin/packages/libxcb/package.py
@@ -13,6 +13,7 @@ class Libxcb(Package):
     version('1.11.1', '118623c15a96b08622603a71d8789bf3')
     depends_on("python")
     depends_on("xcb-proto")
+    depends_on("pkg-config")
 
     # depends_on('pthread')    # Ubuntu: apt-get install libpthread-stubs0-dev
     # depends_on('xau')        # Ubuntu: apt-get install libxau-dev
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index 5d68f203516f548a03475434f5a4533ec02a210c..2179086fe525edb812905fa285e2e63e03872060 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -43,6 +43,8 @@ class Mpich(Package):
     version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
 
     variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
+    variant('pmi', default=True, description='Build with PMI support')
+    variant('hydra', default=True, description='Build the hydra process manager')
 
     provides('mpi@:3.0', when='@3:')
     provides('mpi@:1.3', when='@1:')
@@ -62,6 +64,8 @@ def setup_dependent_package(self, module, dep_spec):
 
     def install(self, spec, prefix):
         config_args = ["--prefix=" + prefix,
+                       "--with-pmi=" + ("yes" if '+pmi' in spec else 'no'),
+                       "--with-pm=" + ('hydra' if '+hydra' in spec else 'no'),
                        "--enable-shared"]
 
         # Variants
diff --git a/var/spack/repos/builtin/packages/msgpack-c/package.py b/var/spack/repos/builtin/packages/msgpack-c/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..a363bc89be4dad8cf9c36871167bfeedf4c02b55
--- /dev/null
+++ b/var/spack/repos/builtin/packages/msgpack-c/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class MsgpackC(Package):
+    """A small, fast binary interchange format convertible to/from JSON"""
+    homepage = "http://www.msgpack.org"
+    url      = "https://github.com/msgpack/msgpack-c/archive/cpp-1.4.1.tar.gz"
+
+    version('1.4.1', 'e2fd3a7419b9bc49e5017fdbefab87e0')
+
+    def install(self, spec, prefix):
+        cmake('.', *std_cmake_args)
+
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/nccmp/package.py b/var/spack/repos/builtin/packages/nccmp/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..72e86831c67f31147595fee5c92988980e9e2095
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nccmp/package.py
@@ -0,0 +1,23 @@
+from spack import *
+
+class Nccmp(Package):
+    """Compare NetCDF Files"""
+    homepage = "http://nccmp.sourceforge.net/"
+    url      = "http://downloads.sourceforge.net/project/nccmp/nccmp-1.8.2.0.tar.gz"
+
+    version('1.8.2.0', '81e6286d4413825aec4327e61a28a580')
+
+    depends_on('netcdf')
+
+    def install(self, spec, prefix):
+        # Configure says: F90 and F90FLAGS are replaced by FC and
+        # FCFLAGS respectively in this configure, please unset
+        # F90/F90FLAGS and set FC/FCFLAGS instead and rerun configure
+        # again.
+        env.pop('F90', None)
+        env.pop('F90FLAGS', None)
+
+        configure('--prefix=%s' % prefix)
+        make()
+        make("check")
+        make("install")
diff --git a/var/spack/repos/builtin/packages/nco/package.py b/var/spack/repos/builtin/packages/nco/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a9aeaa656149c5bccd99f0147065059e8a7679d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nco/package.py
@@ -0,0 +1,30 @@
+from spack import *
+import os
+
+class Nco(Package):
+    """The NCO toolkit manipulates and analyzes data stored in
+    netCDF-accessible formats"""
+
+    homepage = "https://sourceforge.net/projects/nco"
+    url      = "https://github.com/nco/nco/archive/4.5.5.tar.gz"
+
+    version('4.5.5', '9f1f1cb149ad6407c5a03c20122223ce')
+
+    # See "Compilation Requirements" at:
+    # http://nco.sourceforge.net/#bld
+
+    depends_on('netcdf')
+    depends_on('antlr@2.7.7+cxx')  # required for ncap2
+    depends_on('gsl')              # desirable for ncap2
+    depends_on('udunits2')         # allows dimensional unit transformations
+    # depends_on('opendap')        # enables network transparency
+
+    def install(self, spec, prefix):
+        opts = [
+            '--prefix=%s' % prefix,
+            '--disable-openmp',  # TODO: Make this a variant
+            '--disable-dap',     # TODO: Make this a variant
+            '--disable-esmf']
+        configure(*opts)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index 99649da9cadb6e8225d7d38bcd5f6464beff158e..cd8e3755ce0a0278a0a46449c573e9ce68d6a176 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -1,20 +1,21 @@
 from spack import *
-import sys
+from spack.package_test import *
 import os
-import shutil
+
 
 class Openblas(Package):
     """OpenBLAS: An optimized BLAS library"""
     homepage = "http://www.openblas.net"
     url      = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
 
+    version('0.2.18', '805e7f660877d588ea7e3792cda2ee65')
     version('0.2.17', '664a12807f2a2a7cda4781e3ab2ae0e1')
     version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc')
     version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
 
-    variant('shared', default=True, description="Build shared libraries as well as static libs.")
-    variant('openmp', default=True, description="Enable OpenMP support.")
-    variant('fpic', default=True, description="Build position independent code")
+    variant('shared', default=True,  description="Build shared libraries as well as static libs.")  # NOQA: ignore=E501
+    variant('openmp', default=False, description="Enable OpenMP support.")
+    variant('fpic',   default=True,  description="Build position independent code")  # NOQA: ignore=E501
 
     # virtual dependency
     provides('blas')
@@ -45,8 +46,13 @@ def install(self, spec, prefix):
             make_defs += ['BUILD_LAPACK_DEPRECATED=1']
 
         # Add support for OpenMP
-        # Note: Make sure your compiler supports OpenMP
         if '+openmp' in spec:
+            # OpenBLAS (as of 0.2.18) refuses to build with OpenMP for any
+            # compiler named clang, regardless of actual OpenMP support.
+            if spec.satisfies('%clang'):
+                raise InstallError(
+                    'OpenBLAS does not support OpenMP with clang!')
+
             make_defs += ['USE_OPENMP=1']
 
         make_args = make_defs + make_targets
@@ -62,84 +68,49 @@ def install(self, spec, prefix):
             symlink('libopenblas.a', 'blas.a')
             symlink('libopenblas.a', 'libblas.a')
             if '+shared' in spec:
-                symlink('libopenblas.%s' % dso_suffix, 'libblas.%s' % dso_suffix)
+                symlink('libopenblas.%s' % dso_suffix,
+                        'libblas.%s' % dso_suffix)
 
         # Lapack virtual package should provide liblapack.a
         with working_dir(prefix.lib):
             symlink('libopenblas.a', 'liblapack.a')
             if '+shared' in spec:
-                symlink('libopenblas.%s' % dso_suffix, 'liblapack.%s' % dso_suffix)
+                symlink('libopenblas.%s' % dso_suffix,
+                        'liblapack.%s' % dso_suffix)
 
         # Openblas may pass its own test but still fail to compile Lapack
-        # symbols. To make sure we get working Blas and Lapack, do a small test.
+        # symbols. To make sure we get working Blas and Lapack, do a small
+        # test.
         self.check_install(spec)
 
-
     def setup_dependent_package(self, module, dspec):
         # This is WIP for a prototype interface for virtual packages.
         # We can update this as more builds start depending on BLAS/LAPACK.
-        libdir = find_library_path('libopenblas.a', self.prefix.lib64, self.prefix.lib)
+        libdir = find_library_path('libopenblas.a',
+                                   self.prefix.lib64,
+                                   self.prefix.lib)
 
         self.spec.blas_static_lib   = join_path(libdir, 'libopenblas.a')
         self.spec.lapack_static_lib = self.spec.blas_static_lib
 
         if '+shared' in self.spec:
-            self.spec.blas_shared_lib   = join_path(libdir, 'libopenblas.%s' % dso_suffix)
+            self.spec.blas_shared_lib   = join_path(libdir, 'libopenblas.%s' %
+                                                    dso_suffix)
             self.spec.lapack_shared_lib = self.spec.blas_shared_lib
 
     def check_install(self, spec):
-        "Build and run a small program to test that we have Lapack symbols"
-        print "Checking Openblas installation..."
-        checkdir = "spack-check"
-        with working_dir(checkdir, create=True):
-            source = r"""
-#include <cblas.h>
-#include <stdio.h>
-int main(void) {
-int i=0;
-double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
-double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
-double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
-cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
-            3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
-for (i = 0; i < 9; i++)
-  printf("%f\n", C[i]);
-return 0;
-}
-"""
-            expected = """\
-11.000000
--9.000000
-5.000000
--9.000000
-21.000000
--1.000000
-5.000000
--1.000000
-3.000000
-"""
-            with open("check.c", 'w') as f:
-                f.write(source)
-            cc = which('cc')
-            # TODO: Automate these path and library settings
-            cc('-c', "-I%s" % join_path(spec.prefix, "include"), "check.c")
-            cc('-o', "check", "check.o",
-               "-L%s" % join_path(spec.prefix, "lib"), "-llapack", "-lblas", "-lpthread")
-            try:
-                check = Executable('./check')
-                output = check(return_output=True)
-            except:
-                output = ""
-            success = output == expected
-            if not success:
-                print "Produced output does not match expected output."
-                print "Expected output:"
-                print '-'*80
-                print expected
-                print '-'*80
-                print "Produced output:"
-                print '-'*80
-                print output
-                print '-'*80
-                raise RuntimeError("Openblas install check failed")
-        shutil.rmtree(checkdir)
+        source_file = join_path(os.path.dirname(self.module.__file__),
+                                'test_cblas_dgemm.c')
+        blessed_file = join_path(os.path.dirname(self.module.__file__),
+                                 'test_cblas_dgemm.output')
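+        # test_cblas_dgemm.c and its blessed output are shipped alongside
+        # this package.py (self.module.__file__ above).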
+
+        include_flags = ["-I%s" % join_path(spec.prefix, "include")]
+        link_flags = ["-L%s" % join_path(spec.prefix, "lib"),
+                      "-llapack",
+                      "-lblas",
+                      "-lpthread"]
+        if '+openmp' in spec:
+            link_flags.extend([self.compiler.openmp_flag])
+
+        output = compile_c_and_execute(source_file, include_flags, link_flags)
+        compare_output_file(output, blessed_file)
diff --git a/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c
index 634e99d20b5c7bf8c88ae29df8f18e14220debc1..2cb90fb8830c56a0089f3c027c4e114acc06e4f5 100644
--- a/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c
+++ b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.c
@@ -1,13 +1,49 @@
 #include <cblas.h>
 #include <stdio.h>
+
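+// 3x3 column-major matrix and right-hand side vector for the LAPACK
+// dgesv_ call in main() below.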
+double m[] = {
+  3, 1, 3,
+  1, 5, 9,
+  2, 6, 5
+};
+
+double x[] = {
+  -1, 3, -3
+};
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+    void dgesv_(int *n, int *nrhs, double *a, int *lda,
+                int *ipivot, double *b, int *ldb, int *info);
+
+#ifdef __cplusplus
+}
+#endif
+
 int main(void) {
-int i=0;
-double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
-double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
-double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
-cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
-            3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
-for (i = 0; i < 9; i++)
-  printf("%f\n", C[i]);
-return 0;
+  int i;
+  // blas:
+  double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
+  double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
+  double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
+  cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
+              3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
+  for (i = 0; i < 9; i++)
+    printf("%f\n", C[i]);
+
+  // lapack:
+  int ipiv[3];
+  int j;
+  int info;
+  int n = 1;
+  int nrhs = 1;
+  int lda = 3;
+  int ldb = 3;
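+  // With n = 1 only the leading 1x1 system (3 * x[0] = -1) is solved; x[1]
+  // and x[2] are left untouched, matching the blessed output (-0.3, 3.0, -3.0).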
+  dgesv_(&n,&nrhs, &m[0], &lda, ipiv, &x[0], &ldb, &info);
+  for (i=0; i<3; ++i)
+    printf("%5.1f\n", x[i]);
+
+  return 0;
 }
diff --git a/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output
index b8316d747710758fd25521afe496cb89b6eaf336..01404462c4bb5fd40ae18f428d96e0a88ccf538f 100644
--- a/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output
+++ b/var/spack/repos/builtin/packages/openblas/test_cblas_dgemm.output
@@ -7,3 +7,6 @@
 5.000000
 -1.000000
 3.000000
+ -0.3
+  3.0
+ -3.0
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index 776fb6eeaaef807f3d9d477dc4bb94abb51e9dc6..c656f78dab8e22e7db6cda79c50f9a7e32873ac1 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -1,6 +1,8 @@
-from spack import *
 import os
 
+from spack import *
+
+
 class Openmpi(Package):
     """Open MPI is a project combining technologies and resources from
        several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI)
@@ -26,16 +28,26 @@ class Openmpi(Package):
     patch('configure.patch', when="@1.10.0:1.10.1")
 
     variant('psm', default=False, description='Build support for the PSM library.')
+    variant('psm2', default=False, description='Build support for the Intel PSM2 library.')
+    variant('pmi', default=False, description='Build support for PMI-based launchers')
     variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
+    variant('mxm', default=False, description='Build Mellanox Messaging support')
+
+    variant('thread_multiple', default=False, description='Enable MPI_THREAD_MULTIPLE support')
 
-    # TODO : variant support for other schedulers is missing
+    # TODO : variant support for alps, loadleveler is missing
     variant('tm', default=False, description='Build TM (Torque, PBSPro, and compatible) support')
+    variant('slurm', default=False, description='Build SLURM scheduler component')
+
+    variant('sqlite3', default=False, description='Build sqlite3 support')
+
+    # TODO : support for CUDA is missing
 
     provides('mpi@:2.2', when='@1.6.5')
     provides('mpi@:3.0', when='@1.7.5:')
 
     depends_on('hwloc')
-
+    depends_on('sqlite', when='+sqlite3')
 
     def url_for_version(self, version):
         return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
@@ -53,27 +65,35 @@ def setup_dependent_package(self, module, dep_spec):
         self.spec.mpifc  = join_path(self.prefix.bin, 'mpif90')
         self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
 
+    @property
+    def verbs(self):
+        # Up through version 1.6, this option was named --with-openib
+        if self.spec.satisfies('@:1.6'):
+            return 'openib'
+        # In version 1.7, it was renamed to --with-verbs
+        elif self.spec.satisfies('@1.7:'):
+            return 'verbs'
 
     def install(self, spec, prefix):
         config_args = ["--prefix=%s" % prefix,
                        "--with-hwloc=%s" % spec['hwloc'].prefix,
                        "--enable-shared",
                        "--enable-static"]
-
-        # Variants
-        if '+tm' in spec:
-            config_args.append("--with-tm")  # necessary for Torque support
-
-        if '+psm' in spec:
-            config_args.append("--with-psm")
-
-        if '+verbs' in spec:
-            # Up through version 1.6, this option was previously named --with-openib
-            if spec.satisfies('@:1.6'):
-                config_args.append("--with-openib")
-            # In version 1.7, it was renamed to be --with-verbs
-            elif spec.satisfies('@1.7:'):
-                config_args.append("--with-verbs")
+        # Variant based arguments
+        config_args.extend([
+            # Schedulers
+            '--with-tm' if '+tm' in spec else '--without-tm',
+            '--with-slurm' if '+slurm' in spec else '--without-slurm',
+            # Fabrics
+            '--with-psm' if '+psm' in spec else '--without-psm',
+            '--with-psm2' if '+psm2' in spec else '--without-psm2',
+            ('--with-%s' % self.verbs) if '+verbs' in spec else ('--without-%s' % self.verbs),
+            '--with-mxm' if '+mxm' in spec else '--without-mxm',
+            # Other options
+            '--enable-mpi-thread-multiple' if '+thread_multiple' in spec else '--disable-mpi-thread-multiple',
+            '--with-pmi' if '+pmi' in spec else '--without-pmi',
+            '--with-sqlite3' if '+sqlite3' in spec else '--without-sqlite3'
+        ])
 
         # TODO: use variants for this, e.g. +lanl, +llnl, etc.
         # use this for LANL builds, but for LLNL builds, we need:
@@ -81,9 +101,6 @@ def install(self, spec, prefix):
         if self.version == ver("1.6.5") and '+lanl' in spec:
             config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")
 
-        # TODO: Spack should make it so that you can't actually find
-        # these compilers if they're "disabled" for the current
-        # compiler configuration.
         if not self.compiler.f77 and not self.compiler.fc:
             config_args.append("--enable-mpi-fortran=no")
 
diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py
index 8c71bcb7c3d3718b65e67df6ee8b8702e6f724cb..bcd77351aa6012b093b8afcd6a4af5f7310d0f5d 100644
--- a/var/spack/repos/builtin/packages/openspeedshop/package.py
+++ b/var/spack/repos/builtin/packages/openspeedshop/package.py
@@ -1,5 +1,5 @@
 ################################################################################
-# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
 #
 # This program is free software; you can redistribute it and/or modify it under
 # the terms of the GNU General Public License as published by the Free Software
@@ -28,20 +28,15 @@ class Openspeedshop(Package):
     as open source code primarily under LGPL.
     """
 
-
     homepage = "http://www.openspeedshop.org"
-    url      = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.2/openspeedshop-2.2.tar.gz/download"
+    url      = "https://github.com/OpenSpeedShop"
     version('2.2', '16cb051179c2038de4e8a845edf1d573')
+    # Use when the git repository is available
+    version('2.2', branch='master', git='https://github.com/OpenSpeedShop/openspeedshop.git')
 
-    #homepage = "http://www.openspeedshop.org"
-    #url      = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.1/openspeedshop-2.1.tar.gz/download"
-    #version('2.1', 'bdaa57c1a0db9d0c3e0303fd8496c507')
-
-    # optional mirror template
-    #url      = "file:/g/g24/jeg/openspeedshop-2.1.tar.gz"
-    #version('2.1', '64ee17166519838c7b94a1adc138e94f')
-
-
+    # Optional mirror template
+    #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.2.tar.gz"
+    #version('2.2', '643337740dc6c2faca60f42d3620b0e1')
 
     parallel = False
 
@@ -51,11 +46,17 @@ class Openspeedshop(Package):
     variant('frontend', default=False, description="build only the front-end tool using the runtime_dir to point to the target build.")
     variant('cuda', default=False, description="build with cuda packages included.")
     variant('ptgf', default=False, description="build with the PTGF based gui package enabled.")
-    variant('intelmic', default=False, description="build for the Intel MIC platform.")
-    variant('cray', default=False, description="build for Cray platforms.")
-    variant('bluegene', default=False, description="build for Cray platforms.")
     variant('rtfe', default=False, description="build for generic cluster platforms that have different processors on the fe and be nodes.")
 
+    # MPI variants
+    variant('openmpi', default=False, description="Build the MPI experiment collector for OpenMPI.")
+    variant('mpt', default=False, description="Build the MPI experiment collector for SGI MPT.")
+    variant('mvapich2', default=False, description="Build the MPI experiment collector for MVAPICH2.")
+    variant('mvapich', default=False, description="Build the MPI experiment collector for MVAPICH.")
+    variant('mpich2', default=False, description="Build the MPI experiment collector for MPICH2.")
+    variant('mpich', default=False, description="Build the MPI experiment collector for MPICH.")
+
+    depends_on("cmake@3.0.2")
     # Dependencies for openspeedshop that are common to all the variants of the OpenSpeedShop build
     depends_on("bison")
     depends_on("flex")
@@ -63,8 +64,8 @@ class Openspeedshop(Package):
     depends_on("libelf")
     depends_on("libdwarf")
     depends_on("sqlite")
-    depends_on("boost@1.50.0")
-    depends_on("dyninst@8.2.1")
+    depends_on("boost@1.50.0:")
+    depends_on("dyninst@9.1.0")
     depends_on("python")
     depends_on("qt@3.3.8b+krellpatch")
 
@@ -72,15 +73,78 @@ class Openspeedshop(Package):
     depends_on("libunwind", when='+offline')
     depends_on("papi", when='+offline')
     depends_on("libmonitor+krellpatch", when='+offline')
-    #depends_on("openmpi+krelloptions", when='+offline')
-    #depends_on("openmpi", when='+offline')
-    #depends_on("mpich", when='+offline')
+    depends_on("openmpi", when='+offline+openmpi')
+    depends_on("mpich", when='+offline+mpich')
+    depends_on("mpich2", when='+offline+mpich2')
+    depends_on("mvapich2", when='+offline+mvapich2')
+    depends_on("mvapich", when='+offline+mvapich')
+    depends_on("mpt", when='+offline+mpt')
 
     # Dependencies only for the openspeedshop cbtf package.
     depends_on("cbtf", when='+cbtf')
     depends_on("cbtf-krell", when='+cbtf')
-    depends_on("cbtf-argonavis", when='+cbtf')
-    depends_on("mrnet@4.1.0:+lwthreads", when='+cbtf')
+    depends_on("cbtf-argonavis", when='+cbtf+cuda')
+    depends_on("mrnet@5.0.1:+lwthreads+krellpatch", when='+cbtf')
+
+    def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
+        # Set the build-type options in cmakeOptions to the values that the
+        # cbtf-krell build expects.
+
+        compile_flags = "-O2 -g"
+        BuildTypeOptions = []
+        # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+        for word in cmakeOptions[:]:
+            if word.startswith('-DCMAKE_BUILD_TYPE'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_CXX_FLAGS'):
+                cmakeOptions.remove(word)
+            if word.startswith('-DCMAKE_C_FLAGS'):
+                cmakeOptions.remove(word)
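+        # Re-add these options below with the values preferred for this build:
+        # CMAKE_BUILD_TYPE=None plus explicit '-O2 -g' compile flags.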
+        BuildTypeOptions.extend([
+                 '-DCMAKE_BUILD_TYPE=None',
+                 '-DCMAKE_CXX_FLAGS=%s'         % compile_flags,
+                 '-DCMAKE_C_FLAGS=%s'           % compile_flags
+        ])
+
+        cmakeOptions.extend(BuildTypeOptions)
+
+    def set_mpi_cmakeOptions(self, spec, cmakeOptions):
+        # Append to cmakeOptions the options that enable the MPI implementations
+        # selected via the MPI variants.
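+        # Example (hypothetical spec): 'openspeedshop+offline+openmpi' adds
+        # '-DOPENMPI_DIR=<openmpi prefix>' to the cmake options assembled here.
+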
+        MPIOptions = []
+
+        # openmpi
+        if '+openmpi' in spec:
+            MPIOptions.extend([
+                 '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix
+            ])
+        # mpich
+        if '+mpich' in spec:
+            MPIOptions.extend([
+                 '-DMPICH_DIR=%s' % spec['mpich'].prefix
+            ])
+        # mpich2
+        if '+mpich2' in spec:
+            MPIOptions.extend([
+                 '-DMPICH2_DIR=%s' % spec['mpich2'].prefix
+            ])
+        # mvapich
+        if '+mvapich' in spec:
+            MPIOptions.extend([
+                 '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix
+            ])
+        # mvapich2
+        if '+mvapich2' in spec:
+            MPIOptions.extend([
+                 '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix
+            ])
+        # mpt
+        if '+mpt' in spec:
+            MPIOptions.extend([
+                 '-DMPT_DIR=%s' % spec['mpt'].prefix
+            ])
+
+        cmakeOptions.extend(MPIOptions)
+
 
     def install(self, spec, prefix):
 
@@ -100,51 +164,118 @@ def install(self, spec, prefix):
             instrumentor_setting = "offline"
             if '+runtime' in spec:
                 with working_dir('build_runtime', create=True):
-                    cmake('..',
-                          '-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
-                          '-DCMAKE_LIBRARY_PATH=%s'     % prefix.lib64,
-                          '-DINSTRUMENTOR=%s'           % instrumentor_setting,
-                          '-DLIBMONITOR_DIR=%s'         % spec['libmonitor'].prefix,
-                          '-DLIBUNWIND_DIR=%s'          % spec['libunwind'].prefix,
-                          '-DPAPI_DIR=%s'               % spec['papi'].prefix,
-                          *std_cmake_args)
+
+                    cmakeOptions = []
+                    cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
+                                         '-DCMAKE_LIBRARY_PATH=%s'     % prefix.lib64,
+                                         '-DINSTRUMENTOR=%s'           % instrumentor_setting,
+                                         '-DLIBMONITOR_DIR=%s'         % spec['libmonitor'].prefix,
+                                         '-DLIBUNWIND_DIR=%s'          % spec['libunwind'].prefix,
+                                         '-DPAPI_DIR=%s'               % spec['papi'].prefix
+                                        ])
+
+                    # Add any MPI implementations coming from variant settings
+                    self.set_mpi_cmakeOptions(spec, cmakeOptions)
+                    cmakeOptions.extend(std_cmake_args)
+
+                    # Adjust the build options to the favored ones for this build
+                    self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+                    cmake('..', *cmakeOptions)
+
                     make("clean")
                     make()
                     make("install")
             else:
                 cmake_prefix_path = join_path(spec['dyninst'].prefix)
                 with working_dir('build', create=True):
+
                     #python_vers=join_path(spec['python'].version[:2])
                     #'-DOPENMPI_DIR=%s'            % openmpi_prefix_path,
                     #'-DMVAPICH_DIR=%s'            % mvapich_prefix_path,
+                    #'-DMPICH_DIR=%s'              % spec['mpich'].prefix,
+                    #'-DMPICH2_DIR=%s'             % spec['mpich2'].prefix,
+                    #'-DBoost_NO_SYSTEM_PATHS=TRUE',
+                    #'-DBOOST_ROOT=%s'             % spec['boost'].prefix,
+                    #'-DOPENMPI_DIR=%s'            % spec['openmpi'].prefix,
+
                     python_vers='%d.%d' % spec['python'].version[:2]
-                    cmake('..',
-                          '-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
-                          '-DCMAKE_LIBRARY_PATH=%s'     % prefix.lib64,
-                          '-DCMAKE_PREFIX_PATH=%s'      % cmake_prefix_path,
-                          '-DINSTRUMENTOR=%s'           % instrumentor_setting,
-                          '-DBINUTILS_DIR=%s'           % spec['binutils'].prefix,
-                          '-DLIBELF_DIR=%s'             % spec['libelf'].prefix,
-                          '-DLIBDWARF_DIR=%s'           % spec['libdwarf'].prefix,
-                          '-DLIBMONITOR_DIR=%s'         % spec['libmonitor'].prefix,
-                          '-DLIBUNWIND_DIR=%s'          % spec['libunwind'].prefix,
-                          '-DPAPI_DIR=%s'               % spec['papi'].prefix,
-                          '-DSQLITE3_DIR=%s'            % spec['sqlite'].prefix,
-                          '-DQTLIB_DIR=%s'              % spec['qt'].prefix,
-                          '-DPYTHON_EXECUTABLE=%s'      % join_path(spec['python'].prefix + '/bin/python'),
-                          '-DPYTHON_INCLUDE_DIR=%s'     % join_path(spec['python'].prefix.include) + '/python' + python_vers,
-                          '-DPYTHON_LIBRARY=%s'         % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
-                          '-DBoost_NO_SYSTEM_PATHS=TRUE',
-                          '-DBOOST_ROOT=%s'             % spec['boost'].prefix,
-                          '-DDYNINST_DIR=%s'            % spec['dyninst'].prefix,
-                          *std_cmake_args)
+
+                    cmakeOptions = []
+                    cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
+                                         '-DCMAKE_LIBRARY_PATH=%s'     % prefix.lib64,
+                                         '-DCMAKE_PREFIX_PATH=%s'      % cmake_prefix_path,
+                                         '-DINSTRUMENTOR=%s'           % instrumentor_setting,
+                                         '-DBINUTILS_DIR=%s'           % spec['binutils'].prefix,
+                                         '-DLIBELF_DIR=%s'             % spec['libelf'].prefix,
+                                         '-DLIBDWARF_DIR=%s'           % spec['libdwarf'].prefix,
+                                         '-DLIBMONITOR_DIR=%s'         % spec['libmonitor'].prefix,
+                                         '-DLIBUNWIND_DIR=%s'          % spec['libunwind'].prefix,
+                                         '-DPAPI_DIR=%s'               % spec['papi'].prefix,
+                                         '-DSQLITE3_DIR=%s'            % spec['sqlite'].prefix,
+                                         '-DQTLIB_DIR=%s'              % spec['qt'].prefix,
+                                         '-DPYTHON_EXECUTABLE=%s'      % join_path(spec['python'].prefix + '/bin/python'),
+                                         '-DPYTHON_INCLUDE_DIR=%s'     % join_path(spec['python'].prefix.include) + '/python' + python_vers,
+                                         '-DPYTHON_LIBRARY=%s'         % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
+                                         '-DBoost_NO_SYSTEM_PATHS=TRUE',
+                                         '-DBOOST_ROOT=%s'             % spec['boost'].prefix,
+                                         '-DDYNINST_DIR=%s'            % spec['dyninst'].prefix
+                                        ])
+
+                    # Add any MPI implementations coming from variant settings
+                    self.set_mpi_cmakeOptions(spec, cmakeOptions)
+                    cmakeOptions.extend(std_cmake_args)
+
+                    # Adjust the build options to the favored ones for this build
+                    self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+                    cmake('..', *cmakeOptions)
+
                     make("clean")
                     make()
                     make("install")
 
         elif '+cbtf' in spec:
             instrumentor_setting = "cbtf"
+            resolve_symbols = "symtabapi"
             cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + ':' + join_path(spec['dyninst'].prefix)
+            #runtime_platform_cray = "cray"
+            #if '+cray' in spec:
+            #    if '+runtime' in spec:
+            #                   #-DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_INSTALL_DIR} \
+            #        with working_dir('build_cbtf_cray_runtime', create=True):
+            #            python_vers='%d.%d' % spec['python'].version[:2]
+            #            cmake('..',
+            #                  '-DCMAKE_INSTALL_PREFIX=%s'   % prefix,
+            #                  '-DCMAKE_LIBRARY_PATH=%s'     % prefix.lib64,
+            #                  '-DRUNTIME_PLATFORM=%s'		% runtime_platform_cray,
+            #                  '-DCMAKE_PREFIX_PATH=%s'      % cmake_prefix_path,
+            #                  '-DRESOLVE_SYMBOLS=%s'		% resolve_symbols,
+            #                  '-DINSTRUMENTOR=%s'           % instrumentor_setting,
+            #                  '-DCBTF_DIR=%s'			% spec['cbtf'].prefix,
+            #                  '-DCBTF_KRELL_DIR=%s'		% spec['cbtf-krell'].prefix,
+            #                  '-DCBTF_KRELL_CN_RUNTIME_DIR=%s'	% spec['cbtf-krell'].prefix,
+            #                  '-DBINUTILS_DIR=%s'           % spec['binutils'].prefix,
+            #                  '-DLIBELF_DIR=%s'             % spec['libelf'].prefix,
+            #                  '-DLIBDWARF_DIR=%s'           % spec['libdwarf'].prefix,
+            #                  '-DLIBUNWIND_DIR=%s'          % spec['libunwind'].prefix,
+            #                  '-DPAPI_DIR=%s'               % spec['papi'].prefix,
+            #                  '-DDYNINST_DIR=%s'            % spec['dyninst'].prefix,
+            #                  '-DXERCESC_DIR=%s'            % spec['xerces-c'].prefix,
+            #                  '-DMRNET_DIR=%s'              % spec['mrnet'].prefix,
+            #                  '-DBoost_NO_SYSTEM_PATHS=TRUE',
+            #                  '-DBOOST_ROOT=%s'             % spec['boost'].prefix,
+            #                  *std_cmake_args)
+
+            #           make("clean")
+            #           make()
+            #           make("install")
+
+
+            #elif '+mic' in spec:
+            # comment out else and shift over the default case below until arch detection is in
+            #else:
+
             if '+runtime' in spec:
                 with working_dir('build_cbtf_runtime', create=True):
                     python_vers='%d.%d' % spec['python'].version[:2]
@@ -203,14 +334,63 @@ def install(self, spec, prefix):
         #         tbd
 
 
-        #if '+intelmic' in spec:
-        #    with working_dir('build_intelmic_compute', create=True):
-        #         tbd
-        #    with working_dir('build_intelmic_frontend', create=True):
-        #         tbd
 
-        #if '+cray' in spec:
-        #    with working_dir('build_cray_compute', create=True):
-        #         tbd
-        #    with working_dir('build_cray_frontend', create=True):
-        #         tbd
+        #if '+cbtf' in spec:
+        #   if cray build type detected:
+        #        if '+runtime' in spec:
+        #            with working_dir('build_cray_cbtf_compute', create=True):
+        #                tbd
+        #        else:
+        #            with working_dir('build_cray_cbtf_frontend', create=True):
+        #                tbd
+        #            with working_dir('build_cray_osscbtf_frontend', create=True):
+        #                tbd
+        #        fi
+        #    elif '+intelmic' in spec:
+        #        if '+runtime' in spec:
+        #            with working_dir('build_intelmic_cbtf_compute', create=True):
+        #                tbd
+        #        else:
+        #            with working_dir('build_intelmic_cbtf_frontend', create=True):
+        #                tbd
+        #            with working_dir('build_intelmic_osscbtf_frontend', create=True):
+        #                tbd
+        #        fi
+        #    else
+        #        with working_dir('build_cluster_cbtf', create=True):
+        #             tbd
+        #        with working_dir('build_cluster_osscbtf', create=True):
+        #             tbd
+        #    fi
+        #elif '+offline' in spec:
+        #   if cray build type detected:
+        #        if '+runtime' in spec:
+        #            with working_dir('build_cray_ossoff_compute', create=True):
+        #                tbd
+        #        else:
+        #            with working_dir('build_cray_ossoff_frontend', create=True):
+        #                tbd
+        #        fi
+        #    elif '+intelmic' in spec:
+        #        if '+runtime' in spec:
+        #            with working_dir('build_intelmic_ossoff_compute', create=True):
+        #                tbd
+        #        else:
+        #            with working_dir('build_intelmic_ossoff_frontend', create=True):
+        #                tbd
+        #        fi
+        #    elif bgq build type detected:
+        #        if '+runtime' in spec:
+        #            with working_dir('build_bgq_ossoff_compute', create=True):
+        #                tbd
+        #        else:
+        #            with working_dir('build_bgq_ossoff_frontend', create=True):
+        #                tbd
+        #        fi
+        #    else
+        #        with working_dir('build_cluster_ossoff', create=True):
+        #             tbd
+        #    fi
+        #fi
diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py
index e6f8cf026bf2cc41f031dcd6dc0451fa217b7f22..1bbd24781e078c11a55f885370c3ce82f5bc6abc 100644
--- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py
+++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py
@@ -11,11 +11,25 @@ class ParallelNetcdf(Package):
     version('1.7.0', '267eab7b6f9dc78c4d0e6def2def3aea4bc7c9f0')
     version('1.6.1', '62a094eb952f9d1e15f07d56e535052604f1ac34')
 
+    variant('cxx', default=True, description='Build the C++ Interface')
+    variant('fortran', default=True, description='Build the Fortran Interface')
+    variant('fpic', default=True, description='Produce position-independent code (for use with shared libraries)')
+
     depends_on("m4")
     depends_on("mpi")
 
+    # See: https://trac.mcs.anl.gov/projects/parallel-netcdf/browser/trunk/INSTALL
     def install(self, spec, prefix):
-        configure("--prefix=%s" % prefix,
-                  "--with-mpi=%s" % spec['mpi'].prefix)
+        args = list()
+        if '+fpic' in spec:
+            args.extend(['CFLAGS=-fPIC', 'CXXFLAGS=-fPIC', 'FFLAGS=-fPIC'])
+        if '~cxx' in spec:
+            args.append('--disable-cxx')
+        if '~fortran' in spec:
+            args.append('--disable-fortran')
+
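+        # Example (hypothetical spec): 'parallel-netcdf~cxx+fpic' results in
+        #   configure CFLAGS=-fPIC CXXFLAGS=-fPIC FFLAGS=-fPIC --disable-cxx
+        #             --prefix=<prefix> --with-mpi=<mpi prefix>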
+        args.extend(["--prefix=%s" % prefix,
+                     "--with-mpi=%s" % spec['mpi'].prefix])
+        configure(*args)
         make()
         make("install")
diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..c94ef7238ba88a523707a422fba9860df32750ce
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class PyArgcomplete(Package):
+    """Bash tab completion for argparse."""
+
+    homepage = "https://pypi.python.org/pypi/argcomplete"
+    url      = "https://pypi.python.org/packages/source/a/argcomplete/argcomplete-1.1.1.tar.gz"
+
+    version('1.1.1', '89a3839096c9f991ad33828e72d21abf')
+
+    extends('python')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ecb5eecee5b794b80ab1ee3ba1048eb85c43fdb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-astroid/package.py
@@ -0,0 +1,22 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyAstroid(Package):
+    """A common base representation of Python source code, used by pylint
+    and other projects."""
+    homepage = "https://www.astroid.org/"
+    url      = "https://github.com/PyCQA/astroid/archive/astroid-1.4.5.tar.gz"
+
+    version('1.4.5', '7adfc55809908297ef430efe4ea20ac3')
+    version('1.4.4', '8ae6f63f6a2b260bb7f647dafccbc796')
+    version('1.4.3', '4647159de7d4d0c4b1de23ecbfb8e246')
+    version('1.4.2', '677f7965840f375af51b0e86403bee6a')
+    version('1.4.1', 'ed70bfed5e4b25be4292e7fe72da2c02')
+
+    extends('python')
+    depends_on('py-logilab-common')
+    depends_on('py-setuptools')
+    depends_on('py-six')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/repos/builtin/packages/py-genshi/package.py b/var/spack/repos/builtin/packages/py-genshi/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d485c894292b058d0449eb377678c8b33d13467b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-genshi/package.py
@@ -0,0 +1,18 @@
+from spack import version, extends, depends_on
+from spack import Package
+
+
+class PyGenshi(Package):
+    """Python toolkit for generation of output for the web"""
+    homepage = "https://genshi.edgewall.org/"
+    url      = "http://ftp.edgewall.com/pub/genshi/Genshi-0.7.tar.gz"
+
+    version('0.7', '54e64dd69da3ec961f86e686e0848a82')
+    version('0.6.1', '372c368c8931110b0a521fa6091742d7')
+    version('0.6', '604e8b23b4697655d36a69c2d8ef7187')
+
+    extends("python")
+    depends_on("py-setuptools")
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-jinja2/package.py b/var/spack/repos/builtin/packages/py-jinja2/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d92cdd49af2d5cf24e550d0d74cdef63a6da26a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jinja2/package.py
@@ -0,0 +1,27 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyJinja2(Package):
+    """
+    Jinja2 is a template engine written in pure Python. It provides
+    a Django inspired non-XML syntax but supports inline expressions
+    and an optional sandboxed environment.
+    """
+
+    homepage = "http://jinja.pocoo.org/"
+    url      = "https://github.com/pallets/jinja/archive/2.8.tar.gz"
+
+    version('2.8', '4114200650d7630594e3bc70af23f59e')
+    version('2.7.3', '55b87bdc8e585b8b5b86734eefce2621')
+    version('2.7.2', '8e8f226809ae6363009b9296e30adf30')
+    version('2.7.1', '69b6675553c81b1087f95cae7f2179bb')
+    version('2.7', 'ec70433f325051dcedacbb2465028a35')
+
+    extends("python")
+    depends_on("py-setuptools")
+    depends_on("py-markupsafe")
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/repos/builtin/packages/py-logilab-common/package.py b/var/spack/repos/builtin/packages/py-logilab-common/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..a47c4ac0ec4966428fbe6cf0ad9d765206e9ba4a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-logilab-common/package.py
@@ -0,0 +1,18 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyLogilabCommon(Package):
+    """Common modules used by Logilab projects"""
+    homepage = "https://www.logilab.org/project/logilab-common"
+    url      = "https://pypi.python.org/packages/a7/31/1650d23e44794d46935d82b86e73454cc83b814cbe1365260ccce8a2f4c6/logilab-common-1.2.0.tar.gz"
+
+    version('1.2.0', 'f7b51351b7bfe052746fa04c03253c0b')
+
+    extends("python")
+    depends_on("py-setuptools")
+    depends_on("py-six")
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/repos/builtin/packages/py-markupsafe/package.py b/var/spack/repos/builtin/packages/py-markupsafe/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a0c3a724a5b739dcfd24a72a7114a737fbc0e65
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-markupsafe/package.py
@@ -0,0 +1,27 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyMarkupsafe(Package):
+    """
+    MarkupSafe is a library for Python that implements a unicode
+    string that is aware of HTML escaping rules and can be used
+    to implement automatic string escaping. It is used by Jinja 2,
+    the Mako templating engine, the Pylons web framework and many more.
+    """
+
+    homepage = "http://www.pocoo.org/projects/markupsafe/"
+    url      = "https://github.com/pallets/markupsafe/archive/0.23.tar.gz"
+
+    version('0.23', '1a0dadc95169832367c9dcf142155cde')
+    version('0.22', '7a2ac7427b58def567628d06dc328396')
+    version('0.21', 'aebcd93ee05269773c8b80bb6c86fc2f')
+    version('0.20', '0c1fef97c8fd6a986d708f08d7f84a02')
+    version('0.19', '64b05361adb92c11839fc470e308c593')
+
+    extends("python")
+    depends_on("py-setuptools")
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/repos/builtin/packages/py-mistune/package.py b/var/spack/repos/builtin/packages/py-mistune/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..44a114b1730d81f151201a5632439d1e3ac20c30
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mistune/package.py
@@ -0,0 +1,22 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyMistune(Package):
+    """
+    Python markdown parser
+    """
+    homepage = "http://mistune.readthedocs.org/en/latest/"
+    url      = "https://github.com/lepture/mistune/archive/v0.7.1.tar.gz"
+
+    version('0.7.1', '0d9c29700c670790c5b2471070d32ec2')
+    version('0.7', '77750ae8b8d0d584894224a7e0c0523a')
+    version('0.6', 'd4f3d4f28a69e715f82b591d5dacf9a6')
+    version('0.5.1', '1c6cfce28a4aa90cf125217cd6c6fe6c')
+    version('0.5', '997736554f1f95eea78c66ae339b5722')
+
+    extends('python')
+    depends_on('py-setuptools')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-prettytable/package.py b/var/spack/repos/builtin/packages/py-prettytable/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..27fab7c0466467605adc0f19ed16508bb41e2f71
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-prettytable/package.py
@@ -0,0 +1,20 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyPrettytable(Package):
+    """
+    PrettyTable is a simple Python library designed to make
+    it quick and easy to represent tabular data in visually
+    appealing ASCII tables
+    """
+    homepage = "https://code.google.com/archive/p/prettytable/"
+    url      = "https://pypi.python.org/packages/e0/a1/36203205f77ccf98f3c6cf17cf068c972e6458d7e58509ca66da949ca347/prettytable-0.7.2.tar.gz"
+
+    version('0.7.2', 'a6b80afeef286ce66733d54a0296b13b')
+
+    extends("python")
+    depends_on("py-setuptools")
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-py2neo/package.py b/var/spack/repos/builtin/packages/py-py2neo/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d30b823a8768dc306920641e4813f8ab82990ad4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-py2neo/package.py
@@ -0,0 +1,22 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyPy2neo(Package):
+    """FIXME: put a proper description of your package here."""
+    # FIXME: add a proper url for your package's homepage here.
+    homepage = "http://www.example.com"
+    url      = "https://github.com/nigelsmall/py2neo/archive/py2neo-2.0.8.tar.gz"
+
+    version('2.0.8', 'e3ec5172a9e006515ef4155688a05a55')
+    version('2.0.7', '4cfbc5b7dfd7757f3d2e324805faa639')
+    version('2.0.6', '53e4cdb1a95fbae501c66e541d5f4929')
+    version('2.0.5', '143b1f9c0aa22faf170c1b9f84c7343b')
+    version('2.0.4', 'b3f7efd3344dc3f66db4eda11e5899f7')
+
+    depends_on("py-setuptools")
+    extends("python")
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/repos/builtin/packages/py-storm/package.py b/var/spack/repos/builtin/packages/py-storm/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..abc121d30b00b1d190df6108db8055243eb5102d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-storm/package.py
@@ -0,0 +1,16 @@
+from spack import depends_on, extends, version
+from spack import Package
+
+
+class PyStorm(Package):
+    """Storm is an object-relational mapper (ORM) for Python"""
+    homepage = "https://storm.canonical.com/"
+    url      = "https://launchpad.net/storm/trunk/0.20/+download/storm-0.20.tar.gz"
+
+    version('0.20', '8628503141f0f06c0749d607ac09b9c7')
+
+    extends('python')
+    depends_on('py-setuptools')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/r-BiocGenerics/package.py b/var/spack/repos/builtin/packages/r-BiocGenerics/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..2d92c1c4d889299cc8b1ea898495c6e1ec2e048d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-BiocGenerics/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class RBiocgenerics(Package):
+    """S4 generic functions needed by many Bioconductor packages."""
+
+    homepage = 'https://www.bioconductor.org/packages/release/bioc/html/BiocGenerics.html'
+    url      = "https://www.bioconductor.org/packages/release/bioc/src/contrib/BiocGenerics_0.16.1.tar.gz"
+
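+    # expand=False keeps the downloaded tarball intact; R CMD INSTALL consumes
+    # the archive file directly in install() below.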
+    version('0.16.1', 'c2148ffd86fc6f1f819c7f68eb2c744f', expand=False)
+
+    extends('R')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
diff --git a/var/spack/repos/builtin/packages/r-abind/package.py b/var/spack/repos/builtin/packages/r-abind/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d06c7e9240aa8cf25e6e447da1bb5400a981ed07
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-abind/package.py
@@ -0,0 +1,18 @@
+from spack import *
+
+class RAbind(Package):
+    """Combine multidimensional arrays into a single array. This is a
+    generalization of 'cbind' and 'rbind'. Works with vectors, matrices, and
+    higher-dimensional arrays. Also provides functions 'adrop', 'asub', and
+    'afill' for manipulating, extracting and replacing data in arrays."""
+
+    homepage = "https://cran.r-project.org/"
+    url      = "https://cran.r-project.org/src/contrib/abind_1.4-3.tar.gz"
+
+    version('1.4-3', '10fcf80c677b991bf263d38be35a1fc5', expand=False)
+
+    extends('R')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
diff --git a/var/spack/repos/builtin/packages/r-filehash/package.py b/var/spack/repos/builtin/packages/r-filehash/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..4911c636b4df73c21055bd85874c60b61c7025d9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-filehash/package.py
@@ -0,0 +1,22 @@
+from spack import *
+
+class RFilehash(Package):
+    """Implements a simple key-value style database where character string keys
+    are associated with data values that are stored on the disk. A simple
+    interface is provided for inserting, retrieving, and deleting data from the
+    database. Utilities are provided that allow 'filehash' databases to be
+    treated much like environments and lists are already used in R. These
+    utilities are provided to encourage interactive and exploratory analysis on
+    large datasets. Three different file formats for representing the database
+    are currently available and new formats can easily be incorporated by third
+    parties for use in the 'filehash' framework."""
+
+    homepage = 'https://cran.r-project.org/'
+    url      = "https://cran.r-project.org/src/contrib/filehash_2.3.tar.gz"
+
+    version('2.3', '01fffafe09b148ccadc9814c103bdc2f', expand=False)
+
+    extends('R')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
diff --git a/var/spack/repos/builtin/packages/r-magic/package.py b/var/spack/repos/builtin/packages/r-magic/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..e900cdb216e2414a895962f364b16f37dfc075ab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-magic/package.py
@@ -0,0 +1,19 @@
+from spack import *
+
+class RMagic(Package):
+    """A collection of efficient, vectorized algorithms for the creation and
+    investigation of magic squares and hypercubes, including a variety of
+    functions for the manipulation and analysis of arbitrarily dimensioned
+    arrays."""
+
+    homepage = "https://cran.r-project.org/"
+    url      = "https://cran.r-project.org/src/contrib/magic_1.5-6.tar.gz"
+
+    version('1.5-6', 'a68e5ced253b2196af842e1fc84fd029', expand=False)
+
+    extends('R')
+
+    depends_on('r-abind')
+
+    def install(self, spec, prefix):
+        R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
diff --git a/var/spack/repos/builtin/packages/scotch/Makefile.esmumps b/var/spack/repos/builtin/packages/scotch/Makefile.esmumps
new file mode 100644
index 0000000000000000000000000000000000000000..4bfc76019773ef5be23934f627f7d848c365a1fa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scotch/Makefile.esmumps
@@ -0,0 +1,5 @@
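+# Standalone esmumps/ptesmumps targets, included from src/Makefile by the
+# Scotch package's patch() step for releases whose stock Makefile lacks
+# working esmumps rules (see package.py).
+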
+esmumps				:	scotch
+					(cd esmumps ; $(MAKE) scotch && $(MAKE) install)
+
+ptesmumps			:	ptscotch
+					(cd esmumps ; $(MAKE) ptscotch && $(MAKE) ptinstall)
diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py
index 8229ed86867114a57f92dfc7e5e9cd9ca2ee238a..8fad74b24f8d2e7d7ab4f66f799e2a9234084a1e 100644
--- a/var/spack/repos/builtin/packages/scotch/package.py
+++ b/var/spack/repos/builtin/packages/scotch/package.py
@@ -1,88 +1,125 @@
 from spack import *
-import os
+import os, re
 
 class Scotch(Package):
     """Scotch is a software package for graph and mesh/hypergraph
        partitioning, graph clustering, and sparse matrix ordering."""
+
     homepage = "http://www.labri.fr/perso/pelegrin/scotch/"
-    url      = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz"
+    url      = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_6.0.3.tar.gz"
+    base_url = "http://gforge.inria.fr/frs/download.php/latestfile/298"
     list_url = "http://gforge.inria.fr/frs/?group_id=248"
 
     version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc')
+    version('6.0.0', 'c50d6187462ba801f9a82133ee666e8e')
+    version('5.1.10b', 'f587201d6cf5cf63527182fbfba70753')
 
     variant('mpi', default=False, description='Activate the compilation of PT-Scotch')
     variant('compression', default=True, description='Activate the posibility to use compressed files')
     variant('esmumps', default=False, description='Activate the compilation of the lib esmumps needed by mumps')
     variant('shared', default=True, description='Build shared libraries')
 
-    depends_on('mpi', when='+mpi')
-    depends_on('zlib', when='+compression')
     depends_on('flex')
     depends_on('bison')
+    depends_on('mpi', when='+mpi')
+    depends_on('zlib', when='+compression')
 
-    def compiler_specifics(self, makefile_inc, defines):
-        if self.compiler.name == 'gcc':
-            defines.append('-Drestrict=__restrict')
-        elif self.compiler.name == 'intel':
-            defines.append('-restrict')
+    # NOTE: Versions of Scotch up to version 6.0.0 don't include support for
+    # building with 'esmumps' in their default packages.  In order to enable
+    # support for this feature, we must grab the 'esmumps' enabled archives
+    # from the Scotch hosting site.  These alternative archives include a strict
+    # superset of the behavior in their default counterparts, so we choose to
+    # always grab these versions for older Scotch versions for simplicity.
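+    # For example, scotch@6.0.0 resolves to
+    # <base_url>/scotch_6.0.0_esmumps.tar.gz, while scotch@6.0.3 falls back to
+    # the standard 'url' attribute above.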
+    @when('@:6.0.0')
+    def url_for_version(self, version):
+        return '%s/scotch_%s_esmumps.tar.gz' % (Scotch.base_url, version)
+
+    @when('@6.0.1:')
+    def url_for_version(self, version):
+        return super(Scotch, self).url_for_version(version)
+
+    # NOTE: Several of the 'esmumps' enabled Scotch releases up to version 6.0.0
+    # have broken build scripts that don't properly build 'esmumps' as a separate
+    # target, so we need a patch procedure to remove 'esmumps' from existing targets
+    # and to add it as a standalone target.
+    @when('@:6.0.0')
+    def patch(self):
+        makefile_path = os.path.join('src', 'Makefile')
+        with open(makefile_path, 'r') as makefile:
+            esmumps_enabled = any(re.search(r'^esmumps(\s*):(.*)$', line) for line in makefile.readlines())
 
-        makefile_inc.append('CCS       = $(CC)')
+        if not esmumps_enabled:
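+            # Strip the broken esmumps/ptesmumps install rules and append the
+            # standalone targets from Makefile.esmumps instead.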
+            mff = FileFilter(makefile_path)
+            mff.filter(r'^.*((esmumps)|(ptesmumps)).*(install).*$', '')
 
-        if '+mpi' in self.spec:
+            makefile_esmumps_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Makefile.esmumps')
+            with open(makefile_path, 'a') as makefile:
+                makefile.write('\ninclude %s\n' % makefile_esmumps_path)
+
+    @when('@6.0.1:')
+    def patch(self):
+        pass
+
+    # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc' file
+    # that contains all of the configuration variables and their desired values
+    # for the installation.  This function writes this file based on the given
+    # installation variants.
+    def configure(self):
+        makefile_inc = []
+        cflags = [
+            '-O3',
+            '-DCOMMON_RANDOM_FIXED_SEED',
+            '-DSCOTCH_DETERMINISTIC',
+            '-DSCOTCH_RENAME',
+            '-DIDXSIZE64'
+            ]
+
+        ## Library Build Type ##
+
+        if '+shared' in self.spec:
             makefile_inc.extend([
-                    'CCP       = %s' % os.path.join(self.spec['mpi'].prefix.bin, 'mpicc'),
-                    'CCD       = $(CCP)'
-                    ])
+                'LIB       = .so',
+                'CLIBFLAGS = -shared -fPIC',
+                'RANLIB    = echo',
+                'AR	       = $(CC)',
+                'ARFLAGS   = -shared $(LDFLAGS) -o'
+                ])
+            cflags.append('-fPIC')
         else:
             makefile_inc.extend([
-                    'CCP       = mpicc', # It is set but not used
-                    'CCD       = $(CCS)'
-                    ])
+                'LIB       = .a',
+                'CLIBFLAGS = ',
+                'RANLIB    = ranlib',
+                'AR	       = ar',
+                'ARFLAGS   = -ruv '
+                ])
 
+        ## Compiler-Specific Options ##
 
+        if self.compiler.name == 'gcc':
+            cflags.append('-Drestrict=__restrict')
+        elif self.compiler.name == 'intel':
+            cflags.append('-restrict')
 
-    def library_build_type(self, makefile_inc, defines):
-        makefile_inc.extend([
-            'LIB       = .a',
-            'CLIBFLAGS = ',
-            'RANLIB    = ranlib',
-            'AR	       = ar',
-            'ARFLAGS   = -ruv '
-            ])
+        makefile_inc.append('CCS       = $(CC)')
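+        # CCP is only used for PT-Scotch targets; the plain 'mpicc' placeholder
+        # keeps Makefile.inc well formed when the +mpi variant is disabled.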
+        makefile_inc.append('CCP       = %s' %
+            (self.spec['mpi'].mpicc if '+mpi' in self.spec else 'mpicc'))
+        makefile_inc.append('CCD       = $(CCS)')
 
-    @when('+shared')
-    def library_build_type(self, makefile_inc, defines):
-        makefile_inc.extend([
-            'LIB       = .so',
-            'CLIBFLAGS = -shared -fPIC',
-            'RANLIB    = echo',
-            'AR	       = $(CC)',
-            'ARFLAGS   = -shared $(LDFLAGS) -o'
-            ])
+        ## Extra Features ##
 
-    def extra_features(self, makefile_inc, defines):
         ldflags = []
-        
+
         if '+compression' in self.spec:
-            defines.append('-DCOMMON_FILE_COMPRESS_GZ')
+            cflags.append('-DCOMMON_FILE_COMPRESS_GZ')
             ldflags.append('-L%s -lz' % (self.spec['zlib'].prefix.lib))
 
-        defines.append('-DCOMMON_PTHREAD')
+        cflags.append('-DCOMMON_PTHREAD')
         ldflags.append('-lm -lrt -pthread')
-           
-        makefile_inc.append('LDFLAGS   = %s' % ' '.join(ldflags))
 
-    def patch(self):
-        makefile_inc = []
-        defines = [ 
-            '-DCOMMON_RANDOM_FIXED_SEED',
-            '-DSCOTCH_DETERMINISTIC',
-            '-DSCOTCH_RENAME',
-            '-DIDXSIZE64' ]
+        makefile_inc.append('LDFLAGS   = %s' % ' '.join(ldflags))
 
-        self.library_build_type(makefile_inc, defines)
-        self.compiler_specifics(makefile_inc, defines)
-        self.extra_features(makefile_inc, defines)
+        ## General Features ##
 
         makefile_inc.extend([
             'EXE       =',
@@ -93,18 +130,19 @@ def patch(self):
             'MKDIR     = mkdir',
             'MV        = mv',
             'CP        = cp',
-            'CFLAGS    = -O3 %s' % (' '.join(defines)),
+            'CFLAGS    = %s' % ' '.join(cflags),
             'LEX       = %s -Pscotchyy -olex.yy.c' % os.path.join(self.spec['flex'].prefix.bin , 'flex'),
             'YACC      = %s -pscotchyy -y -b y' %    os.path.join(self.spec['bison'].prefix.bin, 'bison'),
-            'prefix    = %s' % self.prefix,
-            ''
+            'prefix    = %s' % self.prefix
             ])
 
         with working_dir('src'):
             with open('Makefile.inc', 'w') as fh:
                 fh.write('\n'.join(makefile_inc))
-            
+
     def install(self, spec, prefix):
+        self.configure()
+
         targets = ['scotch']
         if '+mpi' in self.spec:
             targets.append('ptscotch')
@@ -115,12 +153,10 @@ def install(self, spec, prefix):
                 targets.append('ptesmumps')
 
         with working_dir('src'):
-            for app in targets:
-                make(app, parallel=(not app=='ptesmumps'))
+            for target in targets:
+                make(target, parallel=(target!='ptesmumps'))
 
-        
         install_tree('bin', prefix.bin)
         install_tree('lib', prefix.lib)
         install_tree('include', prefix.include)
         install_tree('man/man1', prefix.share_man1)
-
diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index b7894e4d2bd43097538ed6156d2ce4a77b9d9551..7e68663c6edf7466141d0011eb4d66d7dcf43908 100644
--- a/var/spack/repos/builtin/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
@@ -1,5 +1,6 @@
 from spack import *
 
+
 class Silo(Package):
     """Silo is a library for reading and writing a wide variety of scientific
        data to binary, disk files."""
@@ -12,6 +13,7 @@ class Silo(Package):
     version('4.8', 'b1cbc0e7ec435eb656dc4b53a23663c9')
 
     variant('fortran', default=True, description='Enable Fortran support')
+    variant('shared', default=True, description='Build shared libraries')
     variant('silex', default=False, description='Builds Silex, a GUI for viewing Silo files')
 
     depends_on('hdf5')
@@ -21,6 +23,7 @@ def install(self, spec, prefix):
         config_args = [
             '--enable-fortran' if '+fortran' in spec else '--disable-fortran',
             '--enable-silex' if '+silex' in spec else '--disable-silex',
+            '--enable-shared' if '+shared' in spec else '--disable-shared',
         ]
 
         if '+silex' in spec:
@@ -30,6 +33,7 @@ def install(self, spec, prefix):
             '--prefix=%s' % prefix,
             '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib),
             '--with-zlib=%s,%s' % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib),
+            '--enable-install-lite-headers',
             *config_args)
 
         make()
diff --git a/var/spack/repos/builtin/packages/the_silver_searcher/package.py b/var/spack/repos/builtin/packages/the_silver_searcher/package.py
index e4020b676609f8d588b56603f7251a8843470640..30f06354bfcc7a808aa453939c2d92d1f2dbc0bb 100644
--- a/var/spack/repos/builtin/packages/the_silver_searcher/package.py
+++ b/var/spack/repos/builtin/packages/the_silver_searcher/package.py
@@ -9,6 +9,7 @@ class TheSilverSearcher(Package):
 
     depends_on('pcre')
     depends_on('xz')
+    depends_on('pkg-config')
 
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/unibilium/package.py b/var/spack/repos/builtin/packages/unibilium/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef5de56f791920e1ab83eab8811c37f3245d0635
--- /dev/null
+++ b/var/spack/repos/builtin/packages/unibilium/package.py
@@ -0,0 +1,12 @@
+from spack import *
+
+class Unibilium(Package):
+    """A terminfo parsing library"""
+    homepage = "https://github.com/mauke/unibilium"
+    url      = "https://github.com/mauke/unibilium/archive/v1.2.0.tar.gz"
+
+    version('1.2.0', '9b1c97839a880a373da6c097443b43c4')
+
+    def install(self, spec, prefix):
+        make("PREFIX="+prefix)
+        make("install", "PREFIX="+prefix)
diff --git a/var/spack/repos/builtin/packages/visit/package.py b/var/spack/repos/builtin/packages/visit/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..14e3b6a0c1610c9c2a6d2735757de2e6ed7f2358
--- /dev/null
+++ b/var/spack/repos/builtin/packages/visit/package.py
@@ -0,0 +1,31 @@
+from spack import *
+
+
+class Visit(Package):
+    """VisIt is an Open Source, interactive, scalable, visualization, animation and analysis tool."""
+    homepage = "https://wci.llnl.gov/simulation/computer-codes/visit/"
+    url = "http://portal.nersc.gov/project/visit/releases/2.10.1/visit2.10.1.tar.gz"
+
+    version('2.10.1', '3cbca162fdb0249f17c4456605c4211e')
+    version('2.10.2', '253de0837a9d69fb689befc98ea4d068')
+
+    depends_on("vtk@6.1.0~opengl2")
+    depends_on("qt@4.8.6")
+    depends_on("python")
+    depends_on("silo+shared")
+
+    def install(self, spec, prefix):
+        with working_dir('spack-build', create=True):
+
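+            # Copy Spack's standard CMake arguments so the VisIt-specific
+            # options below extend a private list rather than the shared one.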
+            feature_args = std_cmake_args[:]
+            feature_args.extend(["-DVTK_MAJOR_VERSION=6",
+                                 "-DVTK_MINOR_VERSION=1",
+                                 "-DVISIT_LOC_QMAKE_EXE:FILEPATH=%s/qmake-qt4" % spec['qt'].prefix.bin,
+                                 "-DPYTHON_EXECUTABLE:FILEPATH=%s/python" % spec['python'].prefix.bin,
+                                 "-DVISIT_SILO_DIR:PATH=%s" % spec['silo'].prefix,
+                                 "-DVISIT_HDF5_DIR:PATH=%s" % spec['hdf5'].prefix])
+
+            cmake('../src', *feature_args)
+
+            make()
+            make("install")
diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py
index e36fb936e0d208c2b6ebf2d8a50afdb826af2517..bd02ddcd4b1eb46fb54a7d09284712b2c32f5f0d 100644
--- a/var/spack/repos/builtin/packages/xerces-c/package.py
+++ b/var/spack/repos/builtin/packages/xerces-c/package.py
@@ -1,19 +1,3 @@
-# FIXME:
-# This is a template package file for Spack.  We've conveniently
-# put "FIXME" labels next to all the things you'll want to change.
-#
-# Once you've edited all the FIXME's, delete this whole message,
-# save this file, and test out your package like this:
-#
-#     spack install xerces-c
-#
-# You can always get back here to change things with:
-#
-#     spack edit xerces-c
-#
-# See the spack documentation for more information on building
-# packages.
-#
 from spack import *
 
 class XercesC(Package):