diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index b295341d489d5e0d00e0bb78d3692471406d2361..5fd7163e2e7c13c483aef28e993c1432c3c5ada6 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -174,8 +174,9 @@ def _lock(self, op, timeout=None):
             # If the file were writable, we'd have opened it 'r+'
             raise LockROFileError(self.path)
 
-        tty.debug("{0} locking [{1}:{2}]: timeout {3} sec"
-                  .format(lock_type[op], self._start, self._length, timeout))
+        self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
+                        .format(lock_type[op], self._start, self._length,
+                                timeout))
 
         poll_intervals = iter(Lock._poll_interval_generator())
         start_time = time.time()
@@ -211,14 +212,14 @@ def _poll_lock(self, op):
             # help for debugging distributed locking
             if self.debug:
                 # All locks read the owner PID and host
-                self._read_debug_data()
-                tty.debug('{0} locked {1} [{2}:{3}] (owner={4})'
-                          .format(lock_type[op], self.path,
-                                  self._start, self._length, self.pid))
+                self._read_log_debug_data()
+                self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
+                                .format(lock_type[op], self.path,
+                                        self._start, self._length, self.pid))
 
                 # Exclusive locks write their PID/host
                 if op == fcntl.LOCK_EX:
-                    self._write_debug_data()
+                    self._write_log_debug_data()
 
             return True
 
@@ -245,7 +246,7 @@ def _ensure_parent_directory(self):
                 raise
         return parent
 
-    def _read_debug_data(self):
+    def _read_log_debug_data(self):
         """Read PID and host data out of the file if it is there."""
         self.old_pid = self.pid
         self.old_host = self.host
@@ -257,7 +258,7 @@ def _read_debug_data(self):
             _, _, self.host = host.rpartition('=')
             self.pid = int(self.pid)
 
-    def _write_debug_data(self):
+    def _write_log_debug_data(self):
         """Write PID and host data to the file, recording old values."""
         self.old_pid = self.pid
         self.old_host = self.host
@@ -473,9 +474,6 @@ def release_write(self, release_fn=None):
             else:
                 return False
 
-    def _debug(self, *args):
-        tty.debug(*args)
-
     def _get_counts_desc(self):
         return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
             if tty.is_verbose() else ''
@@ -484,58 +482,50 @@ def _log_acquired(self, locktype, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg(locktype, '{0}{1}'.
-                                     format(desc, attempts_part)))
+        self._log_debug(self._status_msg(locktype, '{0}{1}'
+                        .format(desc, attempts_part)))
 
     def _log_acquiring(self, locktype):
-        self._debug2(self._status_msg(locktype, 'Acquiring'))
+        self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)
+
+    def _log_debug(self, *args, **kwargs):
+        """Output lock debug messages."""
+        kwargs['level'] = kwargs.get('level', 2)
+        tty.debug(*args, **kwargs)
 
     def _log_downgraded(self, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg('READ LOCK', '{0}{1}'
-                                     .format(desc, attempts_part)))
+        self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
+                        .format(desc, attempts_part)))
 
     def _log_downgrading(self):
-        self._debug2(self._status_msg('WRITE LOCK', 'Downgrading'))
+        self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)
 
     def _log_released(self, locktype):
         now = datetime.now()
         desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg(locktype, desc))
+        self._log_debug(self._status_msg(locktype, desc))
 
     def _log_releasing(self, locktype):
-        self._debug2(self._status_msg(locktype, 'Releasing'))
+        self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)
 
     def _log_upgraded(self, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg('WRITE LOCK', '{0}{1}'.
-                                     format(desc, attempts_part)))
+        self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
+                        format(desc, attempts_part)))
 
     def _log_upgrading(self):
-        self._debug2(self._status_msg('READ LOCK', 'Upgrading'))
+        self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)
 
     def _status_msg(self, locktype, status):
         status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
         return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
             locktype, self, status_desc)
 
-    def _debug2(self, *args):
-        # TODO: Easy place to make a single, temporary change to the
-        # TODO:   debug level associated with the more detailed messages.
-        # TODO:
-        # TODO:   Someday it would be great if we could switch this to
-        # TODO:   another level, perhaps _between_ debug and verbose, or
-        # TODO:   some other form of filtering so the first level of
-        # TODO:   debugging doesn't have to generate these messages.  Using
-        # TODO:   verbose here did not work as expected because tests like
-        # TODO:   test_spec_json will write the verbose messages to the
-        # TODO:   output that is used to check test correctness.
-        tty.debug(*args)
-
 
 class LockTransaction(object):
     """Simple nested transaction context manager that uses a file lock.
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index 41eef5d2842a5e87d1acd802c2bb56fc8d06f266..79ca5a99292dd7d3769e7b0b2e55539e9cf3a1e4 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -19,7 +19,8 @@
 
 from llnl.util.tty.color import cprint, cwrite, cescape, clen
 
-_debug = False
+# Globals
+_debug = 0
 _verbose = False
 _stacktrace = False
 _timestamp = False
@@ -29,21 +30,26 @@
 indent = "  "
 
 
+def debug_level():
+    return _debug
+
+
 def is_verbose():
     return _verbose
 
 
-def is_debug():
-    return _debug
+def is_debug(level=1):
+    return _debug >= level
 
 
 def is_stacktrace():
     return _stacktrace
 
 
-def set_debug(flag):
+def set_debug(level=0):
     global _debug
-    _debug = flag
+    assert level >= 0, 'Debug level must be a non-negative value'
+    _debug = level
 
 
 def set_verbose(flag):
@@ -132,12 +138,17 @@ def process_stacktrace(countback):
     return st_text
 
 
+def show_pid():
+    return is_debug(2)
+
+
 def get_timestamp(force=False):
     """Get a string timestamp"""
     if _debug or _timestamp or force:
         # Note inclusion of the PID is useful for parallel builds.
-        return '[{0}, {1}] '.format(
-            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), os.getpid())
+        pid = ', {0}'.format(os.getpid()) if show_pid() else ''
+        return '[{0}{1}] '.format(
+            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
     else:
         return ''
 
@@ -197,7 +208,8 @@ def verbose(message, *args, **kwargs):
 
 
 def debug(message, *args, **kwargs):
-    if _debug:
+    level = kwargs.get('level', 1)
+    if is_debug(level):
         kwargs.setdefault('format', 'g')
         kwargs.setdefault('stream', sys.stderr)
         info(message, *args, **kwargs)
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index 76e07c0e080b5e8de6ad653e8137313a49420dc5..de5ffa8eec62148948b63f49cd9bfd27b5a8ea6a 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -323,14 +323,14 @@ class log_output(object):
     work within test frameworks like nose and pytest.
     """
 
-    def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
+    def __init__(self, file_like=None, echo=False, debug=0, buffer=False):
         """Create a new output log context manager.
 
         Args:
             file_like (str or stream): open file object or name of file where
                 output should be logged
             echo (bool): whether to echo output in addition to logging it
-            debug (bool): whether to enable tty debug mode during logging
+            debug (int): positive to enable tty debug mode during logging
             buffer (bool): pass buffer=True to skip unbuffering output; note
                 this doesn't set up any *new* buffering
 
@@ -355,7 +355,7 @@ def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
         self._active = False  # used to prevent re-entry
 
     def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
-        """Thie behaves the same as init. It allows a logger to be reused.
+        """This behaves the same as init. It allows a logger to be reused.
 
         Arguments are the same as for ``__init__()``.  Args here take
         precedence over those passed to ``__init__()``.
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index a101abefa16e2fe1998175aadf0af74f74dcd795..ccfe614720b84cd98fed08e9cf13c846b64032d8 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -466,8 +466,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
     web_util.push_to_url(
         specfile_path, remote_specfile_path, keep_original=False)
 
-    tty.msg('Buildache for "%s" written to \n %s' %
-            (spec, remote_spackfile_path))
+    tty.debug('Buildcache for "{0}" written to \n {1}'
+              .format(spec, remote_spackfile_path))
 
     try:
         # create an index.html for the build_cache directory so specs can be
@@ -828,13 +828,13 @@ def get_spec(spec=None, force=False):
 
         mirror_dir = url_util.local_file_path(fetch_url_build_cache)
         if mirror_dir:
-            tty.msg("Finding buildcaches in %s" % mirror_dir)
+            tty.debug('Finding buildcaches in {0}'.format(mirror_dir))
             link = url_util.join(fetch_url_build_cache, specfile_name)
             urls.add(link)
 
         else:
-            tty.msg("Finding buildcaches at %s" %
-                    url_util.format(fetch_url_build_cache))
+            tty.debug('Finding buildcaches at {0}'
+                      .format(url_util.format(fetch_url_build_cache)))
             link = url_util.join(fetch_url_build_cache, specfile_name)
             urls.add(link)
 
@@ -857,8 +857,8 @@ def get_specs(allarch=False):
         fetch_url_build_cache = url_util.join(
             mirror.fetch_url, _build_cache_relative_path)
 
-        tty.msg("Finding buildcaches at %s" %
-                url_util.format(fetch_url_build_cache))
+        tty.debug('Finding buildcaches at {0}'
+                  .format(url_util.format(fetch_url_build_cache)))
 
         index_url = url_util.join(fetch_url_build_cache, 'index.json')
 
@@ -909,15 +909,15 @@ def get_keys(install=False, trust=False, force=False):
 
         mirror_dir = url_util.local_file_path(fetch_url_build_cache)
         if mirror_dir:
-            tty.msg("Finding public keys in %s" % mirror_dir)
+            tty.debug('Finding public keys in {0}'.format(mirror_dir))
             files = os.listdir(str(mirror_dir))
             for file in files:
                 if re.search(r'\.key', file) or re.search(r'\.pub', file):
                     link = url_util.join(fetch_url_build_cache, file)
                     keys.add(link)
         else:
-            tty.msg("Finding public keys at %s" %
-                    url_util.format(fetch_url_build_cache))
+            tty.debug('Finding public keys at {0}'
+                      .format(url_util.format(fetch_url_build_cache)))
             # For s3 mirror need to request index.html directly
             p, links = web_util.spider(
                 url_util.join(fetch_url_build_cache, 'index.html'))
@@ -935,14 +935,14 @@ def get_keys(install=False, trust=False, force=False):
                         stage.fetch()
                     except fs.FetchError:
                         continue
-            tty.msg('Found key %s' % link)
+            tty.debug('Found key {0}'.format(link))
             if install:
                 if trust:
                     Gpg.trust(stage.save_filename)
-                    tty.msg('Added this key to trusted keys.')
+                    tty.debug('Added this key to trusted keys.')
                 else:
-                    tty.msg('Will not add this key to trusted keys.'
-                            'Use -t to install all downloaded keys')
+                    tty.debug('Will not add this key to trusted keys.'
+                              'Use -t to install all downloaded keys')
 
 
 def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
@@ -1029,7 +1029,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
     """
     rebuilds = {}
     for mirror in spack.mirror.MirrorCollection(mirrors).values():
-        tty.msg('Checking for built specs at %s' % mirror.fetch_url)
+        tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url))
 
         rebuild_list = []
 
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 5f0cc4db5d7ca95bf3292f4e4260e988335b15ef..fb87373897fc677fc2e9b97d054a80af9009bdad 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -289,10 +289,11 @@ def candidate_urls(self):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.msg("Already downloaded %s" % self.archive_file)
+            tty.debug('Already downloaded {0}'.format(self.archive_file))
             return
 
         url = None
+        errors = []
         for url in self.candidate_urls:
             try:
                 partial_file, save_file = self._fetch_from_url(url)
@@ -300,8 +301,10 @@ def fetch(self):
                     os.rename(partial_file, save_file)
                 break
             except FetchError as e:
-                tty.msg(str(e))
-                pass
+                errors.append(str(e))
+
+        for msg in errors:
+            tty.debug(msg)
 
         if not self.archive_file:
             raise FailedDownloadError(url)
@@ -312,7 +315,7 @@ def _fetch_from_url(self, url):
         if self.stage.save_filename:
             save_file = self.stage.save_filename
             partial_file = self.stage.save_filename + '.part'
-        tty.msg("Fetching %s" % url)
+        tty.debug('Fetching {0}'.format(url))
         if partial_file:
             save_args = ['-C',
                          '-',  # continue partial downloads
@@ -327,6 +330,8 @@ def _fetch_from_url(self, url):
             '-',  # print out HTML headers
             '-L',  # resolve 3xx redirects
             url,
+            '--stderr',  # redirect stderr output
+            '-',         # redirect to stdout
         ]
 
         if not spack.config.get('config:verify_ssl'):
@@ -412,8 +417,8 @@ def cachable(self):
     @_needs_stage
     def expand(self):
         if not self.expand_archive:
-            tty.msg("Staging unexpanded archive %s in %s" % (
-                    self.archive_file, self.stage.source_path))
+            tty.debug('Staging unexpanded archive {0} in {1}'
+                      .format(self.archive_file, self.stage.source_path))
             if not self.stage.expanded:
                 mkdirp(self.stage.source_path)
             dest = os.path.join(self.stage.source_path,
@@ -421,7 +426,7 @@ def expand(self):
             shutil.move(self.archive_file, dest)
             return
 
-        tty.msg("Staging archive: %s" % self.archive_file)
+        tty.debug('Staging archive: {0}'.format(self.archive_file))
 
         if not self.archive_file:
             raise NoArchiveFileError(
@@ -564,7 +569,7 @@ def fetch(self):
                 raise
 
         # Notify the user how we fetched.
-        tty.msg('Using cached archive: %s' % path)
+        tty.debug('Using cached archive: {0}'.format(path))
 
 
 class VCSFetchStrategy(FetchStrategy):
@@ -594,7 +599,8 @@ def __init__(self, **kwargs):
 
     @_needs_stage
     def check(self):
-        tty.msg("No checksum needed when fetching with %s" % self.url_attr)
+        tty.debug('No checksum needed when fetching with {0}'
+                  .format(self.url_attr))
 
     @_needs_stage
     def expand(self):
@@ -672,7 +678,7 @@ def go(self):
 
     @_needs_stage
     def fetch(self):
-        tty.msg("Getting go resource:", self.url)
+        tty.debug('Getting go resource: {0}'.format(self.url))
 
         with working_dir(self.stage.path):
             try:
@@ -788,10 +794,10 @@ def _repo_info(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched {0}".format(self.stage.source_path))
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return
 
-        tty.msg("Cloning git repository: {0}".format(self._repo_info()))
+        tty.debug('Cloning git repository: {0}'.format(self._repo_info()))
 
         git = self.git
         if self.commit:
@@ -959,10 +965,10 @@ def mirror_id(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched %s" % self.stage.source_path)
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return
 
-        tty.msg("Checking out subversion repository: %s" % self.url)
+        tty.debug('Checking out subversion repository: {0}'.format(self.url))
 
         args = ['checkout', '--force', '--quiet']
         if self.revision:
@@ -1068,13 +1074,14 @@ def mirror_id(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched %s" % self.stage.source_path)
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return
 
         args = []
         if self.revision:
             args.append('at revision %s' % self.revision)
-        tty.msg("Cloning mercurial repository:", self.url, *args)
+        tty.debug('Cloning mercurial repository: {0} {1}'
+                  .format(self.url, args))
 
         args = ['clone']
 
@@ -1130,7 +1137,7 @@ def __init__(self, *args, **kwargs):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.msg("Already downloaded %s" % self.archive_file)
+            tty.debug('Already downloaded {0}'.format(self.archive_file))
             return
 
         parsed_url = url_util.parse(self.url)
@@ -1138,7 +1145,7 @@ def fetch(self):
             raise FetchError(
                 'S3FetchStrategy can only fetch from s3:// urls.')
 
-        tty.msg("Fetching %s" % self.url)
+        tty.debug('Fetching {0}'.format(self.url))
 
         basename = os.path.basename(parsed_url.path)
 
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index b9ae0f46fa907cf031d953774150259e24af7736..d5d15f4f9964fa9bc8a26984d17f46f2b7fd8beb 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -215,18 +215,18 @@ def _hms(seconds):
 
 def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
     """
-    Install the package from binary cache
+    Extract the package from binary cache
 
     Args:
         pkg (PackageBase): the package to install from the binary cache
-        cache_only (bool): only install from binary cache
+        cache_only (bool): only extract from binary cache
         explicit (bool): ``True`` if installing the package was explicitly
             requested by the user, otherwise, ``False``
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``
 
     Return:
-        (bool) ``True`` if the package was installed from binary cache,
+        (bool) ``True`` if the package was extracted from binary cache,
             ``False`` otherwise
     """
     installed_from_cache = _try_install_from_binary_cache(pkg, explicit,
@@ -237,10 +237,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
         if cache_only:
             tty.die('{0} when cache-only specified'.format(pre))
 
-        tty.debug('{0}: installing from source'.format(pre))
+        tty.msg('{0}: installing from source'.format(pre))
         return False
 
-    tty.debug('Successfully installed {0} from binary cache'.format(pkg_id))
+    tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
     _print_installed_pkg(pkg.spec.prefix)
     spack.hooks.post_install(pkg.spec)
     return True
@@ -275,17 +275,17 @@ def _process_external_package(pkg, explicit):
     if spec.external_module:
         tty.msg('{0} has external module in {1}'
                 .format(pre, spec.external_module))
-        tty.msg('{0} is actually installed in {1}'
-                .format(pre, spec.external_path))
+        tty.debug('{0} is actually installed in {1}'
+                  .format(pre, spec.external_path))
     else:
-        tty.msg("{0} externally installed in {1}"
+        tty.msg('{0} externally installed in {1}'
                 .format(pre, spec.external_path))
 
     try:
         # Check if the package was already registered in the DB.
         # If this is the case, then just exit.
         rec = spack.store.db.get_record(spec)
-        tty.msg('{0} already registered in DB'.format(pre))
+        tty.debug('{0} already registered in DB'.format(pre))
 
         # Update the value of rec.explicit if it is necessary
         _update_explicit_entry_in_db(pkg, rec, explicit)
@@ -294,11 +294,11 @@ def _process_external_package(pkg, explicit):
         # If not, register it and generate the module file.
         # For external packages we just need to run
         # post-install hooks to generate module files.
-        tty.msg('{0} generating module file'.format(pre))
+        tty.debug('{0} generating module file'.format(pre))
         spack.hooks.post_install(spec)
 
         # Add to the DB
-        tty.msg('{0} registering into DB'.format(pre))
+        tty.debug('{0} registering into DB'.format(pre))
         spack.store.db.add(spec, None, explicit=explicit)
 
 
@@ -314,7 +314,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
             otherwise, ``False``
 
     Return:
-        (bool) ``True`` if the package was installed from binary cache,
+        (bool) ``True`` if the package was extracted from binary cache,
             else ``False``
     """
     tarball = binary_distribution.download_tarball(binary_spec)
@@ -325,7 +325,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
         return False
 
     pkg_id = package_id(pkg)
-    tty.msg('Installing {0} from binary cache'.format(pkg_id))
+    tty.msg('Extracting {0} from binary cache'.format(pkg_id))
     binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
                                         unsigned=unsigned, force=False)
     pkg.installed_from_binary_cache = True
@@ -335,10 +335,10 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
 
 def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
     """
-    Try to install the package from binary cache.
+    Try to extract the package from binary cache.
 
     Args:
-        pkg (PackageBase): the package to be installed from binary cache
+        pkg (PackageBase): the package to be extracted from binary cache
         explicit (bool): the package was explicitly requested by the user
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``
@@ -369,7 +369,7 @@ def _update_explicit_entry_in_db(pkg, rec, explicit):
         with spack.store.db.write_transaction():
             rec = spack.store.db.get_record(pkg.spec)
             message = '{s.name}@{s.version} : marking the package explicit'
-            tty.msg(message.format(s=pkg.spec))
+            tty.debug(message.format(s=pkg.spec))
             rec.explicit = True
 
 
@@ -452,7 +452,8 @@ def install_msg(name, pid):
     Return:
         (str) Colorized installing message
     """
-    return '{0}: '.format(pid) + colorize('@*{Installing} @*g{%s}' % name)
+    pre = '{0}: '.format(pid) if tty.show_pid() else ''
+    return pre + colorize('@*{Installing} @*g{%s}' % name)
 
 
 def log(pkg):
@@ -1064,7 +1065,8 @@ def _install_task(self, task, **kwargs):
 
         pkg.run_tests = (tests is True or tests and pkg.name in tests)
 
-        pre = '{0}: {1}:'.format(self.pid, pkg.name)
+        pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
+        pre = '{0}{1}:'.format(pid, pkg.name)
 
         def build_process():
             """
@@ -1083,8 +1085,8 @@ def build_process():
                     pkg.do_stage()
 
             pkg_id = package_id(pkg)
-            tty.msg('{0} Building {1} [{2}]'
-                    .format(pre, pkg_id, pkg.build_system_class))
+            tty.debug('{0} Building {1} [{2}]'
+                      .format(pre, pkg_id, pkg.build_system_class))
 
             # get verbosity from do_install() parameter or saved value
             echo = verbose
@@ -1105,8 +1107,8 @@ def build_process():
                     if install_source and os.path.isdir(source_path):
                         src_target = os.path.join(pkg.spec.prefix, 'share',
                                                   pkg.name, 'src')
-                        tty.msg('{0} Copying source to {1}'
-                                .format(pre, src_target))
+                        tty.debug('{0} Copying source to {1}'
+                                  .format(pre, src_target))
                         fs.install_tree(pkg.stage.source_path, src_target)
 
                     # Do the real install in the source directory.
@@ -1128,7 +1130,7 @@ def build_process():
                                 pass
 
                         # cache debug settings
-                        debug_enabled = tty.is_debug()
+                        debug_level = tty.debug_level()
 
                         # Spawn a daemon that reads from a pipe and redirects
                         # everything to log_path
@@ -1137,11 +1139,11 @@ def build_process():
                                     pkg.phases, pkg._InstallPhase_phases):
 
                                 with logger.force_echo():
-                                    inner_debug = tty.is_debug()
-                                    tty.set_debug(debug_enabled)
+                                    inner_debug_level = tty.debug_level()
+                                    tty.set_debug(debug_level)
                                     tty.msg("{0} Executing phase: '{1}'"
                                             .format(pre, phase_name))
-                                    tty.set_debug(inner_debug)
+                                    tty.set_debug(inner_debug_level)
 
                                 # Redirect stdout and stderr to daemon pipe
                                 phase = getattr(pkg, phase_attr)
@@ -1157,11 +1159,11 @@ def build_process():
             pkg._total_time = time.time() - start_time
             build_time = pkg._total_time - pkg._fetch_time
 
-            tty.msg('{0} Successfully installed {1}'
-                    .format(pre, pkg_id),
-                    'Fetch: {0}.  Build: {1}.  Total: {2}.'
-                    .format(_hms(pkg._fetch_time), _hms(build_time),
-                            _hms(pkg._total_time)))
+            tty.debug('{0} Successfully installed {1}'
+                      .format(pre, pkg_id),
+                      'Fetch: {0}.  Build: {1}.  Total: {2}.'
+                      .format(_hms(pkg._fetch_time), _hms(build_time),
+                              _hms(pkg._total_time)))
             _print_installed_pkg(pkg.prefix)
 
             # preserve verbosity across runs
@@ -1192,7 +1194,8 @@ def build_process():
         except spack.build_environment.StopPhase as e:
             # A StopPhase exception means that do_install was asked to
             # stop early from clients, and is not an error at this point
-            tty.debug('{0} {1}'.format(self.pid, str(e)))
+            pre = '{0}: '.format(self.pid) if tty.show_pid() else ''
+            tty.debug('{0}{1}'.format(pre, str(e)))
             tty.debug('Package stage directory : {0}'
                       .format(pkg.stage.source_path))
 
diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py
index bdeef049ff617a26642ca280d136fd0f58ca6e2c..244913d6f5a2acbb8ac2df091eb30ee25d5326e7 100644
--- a/lib/spack/spack/main.py
+++ b/lib/spack/spack/main.py
@@ -362,8 +362,9 @@ def make_argument_parser(**kwargs):
         '-C', '--config-scope', dest='config_scopes', action='append',
         metavar='DIR', help="add a custom configuration scope")
     parser.add_argument(
-        '-d', '--debug', action='store_true',
-        help="write out debug logs during compile")
+        '-d', '--debug', action='count', default=0,
+        help="write out debug messages "
+             "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
     parser.add_argument(
         '--timestamp', action='store_true',
         help="Add a timestamp to tty output")
@@ -438,7 +439,7 @@ def setup_main_options(args):
     tty.set_debug(args.debug)
     tty.set_stacktrace(args.stacktrace)
 
-    # debug must be set first so that it can even affect behvaior of
+    # debug must be set first so that it can even affect behavior of
     # errors raised by spack.config.
     if args.debug:
         spack.error.debug = True
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 5c4ff23d40662b7cebbe3db26297e577190d41fb..4febfb1b47afd508d1a5ebcd6934a0cb21f44776 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -1121,9 +1121,8 @@ def do_fetch(self, mirror_only=False):
             raise ValueError("Can only fetch concrete packages.")
 
         if not self.has_code:
-            tty.msg(
-                "No fetch required for %s: package has no code." % self.name
-            )
+            tty.debug('No fetch required for {0}: package has no code.'
+                      .format(self.name))
 
         start_time = time.time()
         checksum = spack.config.get('config:checksum')
@@ -1139,7 +1138,8 @@ def do_fetch(self, mirror_only=False):
                 ignore_checksum = tty.get_yes_or_no("  Fetch anyway?",
                                                     default=False)
                 if ignore_checksum:
-                    tty.msg("Fetching with no checksum.", ck_msg)
+                    tty.debug('Fetching with no checksum. {0}'
+                              .format(ck_msg))
 
             if not ignore_checksum:
                 raise FetchError("Will not fetch %s" %
@@ -1195,7 +1195,7 @@ def do_patch(self):
 
         # If there are no patches, note it.
         if not patches and not has_patch_fun:
-            tty.msg("No patches needed for %s" % self.name)
+            tty.debug('No patches needed for {0}'.format(self.name))
             return
 
         # Construct paths to special files in the archive dir used to
@@ -1208,15 +1208,15 @@ def do_patch(self):
         # If we encounter an archive that failed to patch, restage it
         # so that we can apply all the patches again.
         if os.path.isfile(bad_file):
-            tty.msg("Patching failed last time. Restaging.")
+            tty.debug('Patching failed last time. Restaging.')
             self.stage.restage()
 
         # If this file exists, then we already applied all the patches.
         if os.path.isfile(good_file):
-            tty.msg("Already patched %s" % self.name)
+            tty.debug('Already patched {0}'.format(self.name))
             return
         elif os.path.isfile(no_patches_file):
-            tty.msg("No patches needed for %s" % self.name)
+            tty.debug('No patches needed for {0}'.format(self.name))
             return
 
         # Apply all the patches for specs that match this one
@@ -1225,7 +1225,7 @@ def do_patch(self):
             try:
                 with working_dir(self.stage.source_path):
                     patch.apply(self.stage)
-                tty.msg('Applied patch %s' % patch.path_or_url)
+                tty.debug('Applied patch {0}'.format(patch.path_or_url))
                 patched = True
             except spack.error.SpackError as e:
                 tty.debug(e)
@@ -1239,7 +1239,7 @@ def do_patch(self):
             try:
                 with working_dir(self.stage.source_path):
                     self.patch()
-                tty.msg("Ran patch() for %s" % self.name)
+                tty.debug('Ran patch() for {0}'.format(self.name))
                 patched = True
             except spack.multimethod.NoSuchMethodError:
                 # We are running a multimethod without a default case.
@@ -1249,12 +1249,12 @@ def do_patch(self):
                     # directive, AND the patch function didn't apply, say
                     # no patches are needed.  Otherwise, we already
                     # printed a message for each patch.
-                    tty.msg("No patches needed for %s" % self.name)
+                    tty.debug('No patches needed for {0}'.format(self.name))
             except spack.error.SpackError as e:
                 tty.debug(e)
 
                 # Touch bad file if anything goes wrong.
-                tty.msg("patch() function failed for %s" % self.name)
+                tty.msg('patch() function failed for {0}'.format(self.name))
                 touch(bad_file)
                 raise
 
@@ -1341,7 +1341,7 @@ def _has_make_target(self, target):
             if os.path.exists(makefile):
                 break
         else:
-            tty.msg('No Makefile found in the build directory')
+            tty.debug('No Makefile found in the build directory')
             return False
 
         # Check if 'target' is a valid target.
@@ -1372,7 +1372,8 @@ def _has_make_target(self, target):
 
         for missing_target_msg in missing_target_msgs:
             if missing_target_msg.format(target) in stderr:
-                tty.msg("Target '" + target + "' not found in " + makefile)
+                tty.debug("Target '{0}' not found in {1}"
+                          .format(target, makefile))
                 return False
 
         return True
@@ -1400,7 +1401,7 @@ def _has_ninja_target(self, target):
 
         # Check if we have a Ninja build script
         if not os.path.exists('build.ninja'):
-            tty.msg('No Ninja build script found in the build directory')
+            tty.debug('No Ninja build script found in the build directory')
             return False
 
         # Get a list of all targets in the Ninja build script
@@ -1412,7 +1413,8 @@ def _has_ninja_target(self, target):
                    if line.startswith(target + ':')]
 
         if not matches:
-            tty.msg("Target '" + target + "' not found in build.ninja")
+            tty.debug("Target '{0}' not found in build.ninja"
+                      .format(target))
             return False
 
         return True
@@ -1719,11 +1721,12 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
             if specs:
                 if deprecator:
                     spack.store.db.deprecate(specs[0], deprecator)
-                    tty.msg("Deprecating stale DB entry for "
-                            "%s" % spec.short_spec)
+                    tty.debug('Deprecating stale DB entry for {0}'
+                              .format(spec.short_spec))
                 else:
                     spack.store.db.remove(specs[0])
-                    tty.msg("Removed stale DB entry for %s" % spec.short_spec)
+                    tty.debug('Removed stale DB entry for {0}'
+                              .format(spec.short_spec))
                 return
             else:
                 raise InstallError(str(spec) + " is not installed.")
@@ -1767,7 +1770,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
         if pkg is not None:
             spack.hooks.post_uninstall(spec)
 
-        tty.msg("Successfully uninstalled %s" % spec.short_spec)
+        tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))
 
     def do_uninstall(self, force=False):
         """Uninstall this package by spec."""
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 5cac9e32a1cd32c36c10e3e7b854ad3c8c4bfbc9..01f499bd0bf75cbcf22d6dab7b3f16d44699e194 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -457,6 +457,11 @@ def generate_fetchers():
                 for fetcher in dynamic_fetchers:
                     yield fetcher
 
+        def print_errors(errors):
+            for msg in errors:
+                tty.debug(msg)
+
+        errors = []
         for fetcher in generate_fetchers():
             try:
                 fetcher.stage = self
@@ -467,14 +472,18 @@ def generate_fetchers():
                 # Don't bother reporting when something is not cached.
                 continue
             except spack.error.SpackError as e:
-                tty.msg("Fetching from %s failed." % fetcher)
+                errors.append('Fetching from {0} failed.'.format(fetcher))
                 tty.debug(e)
                 continue
         else:
-            err_msg = "All fetchers failed for %s" % self.name
+            print_errors(errors)
+
+            err_msg = 'All fetchers failed for {0}'.format(self.name)
             self.fetcher = self.default_fetcher
             raise fs.FetchError(err_msg, None)
 
+        print_errors(errors)
+
     def check(self):
         """Check the downloaded archive against a checksum digest.
            No-op if this stage checks code out of a repository."""
@@ -535,9 +544,9 @@ def expand_archive(self):
         downloaded."""
         if not self.expanded:
             self.fetcher.expand()
-            tty.msg("Created stage in %s" % self.path)
+            tty.debug('Created stage in {0}'.format(self.path))
         else:
-            tty.msg("Already staged %s in %s" % (self.name, self.path))
+            tty.debug('Already staged {0} in {1}'.format(self.name, self.path))
 
     def restage(self):
         """Removes the expanded archive path if it exists, then re-expands
@@ -708,13 +717,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         pass
 
     def fetch(self, *args, **kwargs):
-        tty.msg("No need to fetch for DIY.")
+        tty.debug('No need to fetch for DIY.')
 
     def check(self):
-        tty.msg("No checksum needed for DIY.")
+        tty.debug('No checksum needed for DIY.')
 
     def expand_archive(self):
-        tty.msg("Using source directory: %s" % self.source_path)
+        tty.debug('Using source directory: {0}'.format(self.source_path))
 
     @property
     def expanded(self):
@@ -732,7 +741,7 @@ def destroy(self):
         pass
 
     def cache_local(self):
-        tty.msg("Sources for DIY stages are not cached")
+        tty.debug('Sources for DIY stages are not cached')
 
 
 def ensure_access(file):
@@ -782,12 +791,12 @@ def get_checksums_for_versions(
     max_len = max(len(str(v)) for v in sorted_versions)
     num_ver = len(sorted_versions)
 
-    tty.msg("Found {0} version{1} of {2}:".format(
-            num_ver, '' if num_ver == 1 else 's', name),
-            "",
-            *spack.cmd.elide_list(
-                ["{0:{1}}  {2}".format(str(v), max_len, url_dict[v])
-                 for v in sorted_versions]))
+    tty.debug('Found {0} version{1} of {2}:'.format(
+              num_ver, '' if num_ver == 1 else 's', name),
+              '',
+              *spack.cmd.elide_list(
+                  ['{0:{1}}  {2}'.format(str(v), max_len, url_dict[v])
+                   for v in sorted_versions]))
     print()
 
     if batch:
@@ -802,9 +811,10 @@ def get_checksums_for_versions(
     versions = sorted_versions[:archives_to_fetch]
     urls = [url_dict[v] for v in versions]
 
-    tty.msg("Downloading...")
+    tty.debug('Downloading...')
     version_hashes = []
     i = 0
+    errors = []
     for url, version in zip(urls, versions):
         try:
             if fetch_options:
@@ -825,10 +835,12 @@ def get_checksums_for_versions(
                     hashlib.sha256, stage.archive_file)))
                 i += 1
         except FailedDownloadError:
-            tty.msg("Failed to fetch {0}".format(url))
+            errors.append('Failed to fetch {0}'.format(url))
         except Exception as e:
-            tty.msg("Something failed on {0}, skipping.".format(url),
-                    "  ({0})".format(e))
+            tty.msg('Something failed on {0}, skipping.  ({1})'.format(url, e))
+
+    for msg in errors:
+        tty.debug(msg)
 
     if not version_hashes:
         tty.die("Could not fetch any versions for {0}".format(name))
@@ -843,8 +855,8 @@ def get_checksums_for_versions(
     ])
 
     num_hash = len(version_hashes)
-    tty.msg("Checksummed {0} version{1} of {2}:".format(
-        num_hash, '' if num_hash == 1 else 's', name))
+    tty.debug('Checksummed {0} version{1} of {2}:'.format(
+              num_hash, '' if num_hash == 1 else 's', name))
 
     return version_lines
 
diff --git a/lib/spack/spack/test/cache_fetch.py b/lib/spack/spack/test/cache_fetch.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b4c3cb88773071df1d6a790ebccf0cff527e0d7
--- /dev/null
+++ b/lib/spack/spack/test/cache_fetch.py
@@ -0,0 +1,36 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+import pytest
+
+from llnl.util.filesystem import mkdirp, touch
+
+from spack.stage import Stage
+from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
+
+
+def test_fetch_missing_cache(tmpdir):
+    """Ensure fetching a missing cache file raises an error."""
+    testpath = str(tmpdir)
+
+    fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
+    with Stage(fetcher, path=testpath):
+        with pytest.raises(NoCacheError, match=r'No cache'):
+            fetcher.fetch()
+
+
+def test_fetch(tmpdir):
+    """Ensure a fetch after expanding is effectively a no-op."""
+    testpath = str(tmpdir)
+    cache = os.path.join(testpath, 'cache.tar.gz')
+    touch(cache)
+    url = 'file:///{0}'.format(cache)
+
+    fetcher = CacheURLFetchStrategy(url=url)
+    with Stage(fetcher, path=testpath) as stage:
+        source_path = stage.source_path
+        mkdirp(source_path)
+        fetcher.fetch()
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index eb8813387ba9ed9c19dc293cbdf95721344d1b32..9a46475c18f87e28e8892ec90282e5acf0b01d4a 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -133,8 +133,8 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
 
     # make sure that output from the actual package file appears in the
     # right place in the build log.
-    assert re.search(r"BEFORE INSTALL\n==>( \[.+\])? './configure'", out)
-    assert "'install'\nAFTER INSTALL" in out
+    assert "BEFORE INSTALL" in out
+    assert "AFTER INSTALL" in out
 
 
 @pytest.mark.disable_clean_stage_check
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 2f779c6a5f393184f75b4cb947954b09348bd56e..6d15dff7f62cca059aa7c94054eaeb0e259a7411 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -344,10 +344,9 @@ def test_nosource_pkg_install(
 
     # Make sure install works even though there is no associated code.
     pkg.do_install()
-
-    # Also make sure an error is raised if `do_fetch` is called.
-    pkg.do_fetch()
-    assert "No fetch required for nosource" in capfd.readouterr()[0]
+    out = capfd.readouterr()
+    assert "Installing dependency-install" in out[0]
+    assert "Missing a source id for nosource" in out[1]
 
 
 def test_nosource_pkg_install_post_install(
diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py
index cc4b168e6c8094dc9d01033d45eea26a7c14672e..68b70e084087be25d53368672a4b0d02612c8b68 100644
--- a/lib/spack/spack/test/installer.py
+++ b/lib/spack/spack/test/installer.py
@@ -99,10 +99,21 @@ def test_hms(sec, result):
     assert inst._hms(sec) == result
 
 
-def test_install_msg():
+def test_install_msg(monkeypatch):
+    """Test results of call to install_msg based on debug level."""
     name = 'some-package'
     pid = 123456
-    expected = "{0}: Installing {1}".format(pid, name)
+    install_msg = 'Installing {0}'.format(name)
+
+    monkeypatch.setattr(tty, '_debug', 0)
+    assert inst.install_msg(name, pid) == install_msg
+
+    monkeypatch.setattr(tty, '_debug', 1)
+    assert inst.install_msg(name, pid) == install_msg
+
+    # Expect the PID to be added at debug level 2
+    monkeypatch.setattr(tty, '_debug', 2)
+    expected = "{0}: {1}".format(pid, install_msg)
     assert inst.install_msg(name, pid) == expected
 
 
@@ -151,7 +162,6 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd):
 
     out = capfd.readouterr()[0]
     assert 'has external module in {0}'.format(spec.external_module) in out
-    assert 'is actually installed in {0}'.format(spec.external_path) in out
 
 
 def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
@@ -180,7 +190,7 @@ def _spec(spec):
     spec = spack.spec.Spec('a').concretized()
     assert inst._process_binary_cache_tarball(spec.package, spec, False, False)
 
-    assert 'Installing a from binary cache' in capfd.readouterr()[0]
+    assert 'Extracting a from binary cache' in capfd.readouterr()[0]
 
 
 def test_try_install_from_binary_cache(install_mockery, mock_packages,
diff --git a/lib/spack/spack/test/llnl/util/tty/tty.py b/lib/spack/spack/test/llnl/util/tty/tty.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8366a97383e48b919b157ff6e90333e824507da
--- /dev/null
+++ b/lib/spack/spack/test/llnl/util/tty/tty.py
@@ -0,0 +1,87 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os
+
+import pytest
+import llnl.util.tty as tty
+
+
+def test_get_timestamp(monkeypatch):
+    """Ensure the results of get_timestamp are reasonable."""
+
+    # Debug disabled should return an empty string
+    monkeypatch.setattr(tty, '_debug', 0)
+    assert not tty.get_timestamp(False), 'Expected an empty string'
+
+    # Debug disabled but force the timestamp should return a string
+    assert tty.get_timestamp(True), 'Expected a timestamp/non-empty string'
+
+    pid_str = ' {0}'.format(os.getpid())
+
+    # Level 1 debugging should return a timestamp WITHOUT the pid
+    monkeypatch.setattr(tty, '_debug', 1)
+    out_str = tty.get_timestamp(False)
+    assert out_str and pid_str not in out_str, 'Expected no PID in results'
+
+    # Level 2 debugging should also return a timestamp WITH the pid
+    monkeypatch.setattr(tty, '_debug', 2)
+    out_str = tty.get_timestamp(False)
+    assert out_str and pid_str in out_str, 'Expected PID in results'
+
+
+@pytest.mark.parametrize('msg,enabled,trace,newline', [
+    ('', False, False, False),           # Nothing is output
+    (Exception(''), True, False, True),  # Exception  output
+    ('trace', True, True, False),        # stacktrace output
+    ('newline', True, False, True),      # newline in output
+    ('no newline', True, False, False)   # no newline output
+])
+def test_msg(capfd, monkeypatch, enabled, msg, trace, newline):
+    """Ensure the output from msg with options is appropriate."""
+
+    # temporarily use the parameterized settings
+    monkeypatch.setattr(tty, '_msg_enabled', enabled)
+    monkeypatch.setattr(tty, '_stacktrace', trace)
+
+    expected = [msg if isinstance(msg, str) else 'Exception: ']
+    if newline:
+        expected[0] = '{0}\n'.format(expected[0])
+    if trace:
+        expected.insert(0, '.py')
+
+    tty.msg(msg, newline=newline)
+    out = capfd.readouterr()[0]
+    for msg in expected:
+        assert msg in out
+
+
+@pytest.mark.parametrize('msg,trace,wrap', [
+    (Exception(''), False, False),  # Exception  output
+    ('trace', True, False),         # stacktrace output
+    ('wrap', False, True),          # wrap in output
+])
+def test_info(capfd, monkeypatch, msg, trace, wrap):
+    """Ensure the output from info with options is appropriate."""
+
+    # temporarily use the parameterized settings
+    monkeypatch.setattr(tty, '_stacktrace', trace)
+
+    expected = [msg if isinstance(msg, str) else 'Exception: ']
+    if trace:
+        expected.insert(0, '.py')
+
+    extra = 'This extra argument *should* make for a sufficiently long line' \
+        ' that needs to be wrapped if the option is enabled.'
+    args = [msg, extra]
+
+    num_newlines = 3 if wrap else 2
+
+    tty.info(*args, wrap=wrap, countback=3)
+    out = capfd.readouterr()[0]
+    for msg in expected:
+        assert msg in out
+
+    assert out.count('\n') == num_newlines
diff --git a/lib/spack/spack/test/s3_fetch.py b/lib/spack/spack/test/s3_fetch.py
index 682f1a28429d1d4b497f04a92cfbad424fd6195a..70efad19cefef73f9780cddad19d405cda1b2972 100644
--- a/lib/spack/spack/test/s3_fetch.py
+++ b/lib/spack/spack/test/s3_fetch.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+import os
 import pytest
 
 import spack.fetch_strategy as spack_fs
@@ -27,3 +28,19 @@ def test_s3fetchstrategy_bad_url(tmpdir):
         assert fetcher.archive_file is None
         with pytest.raises(spack_fs.FetchError):
             fetcher.fetch()
+
+
+def test_s3fetchstrategy_downloaded(tmpdir):
+    """Ensure fetch with an already-downloaded archive file is a no-op."""
+    testpath = str(tmpdir)
+    archive = os.path.join(testpath, 's3.tar.gz')
+
+    class Archived_S3FS(spack_fs.S3FetchStrategy):
+        @property
+        def archive_file(self):
+            return archive
+
+    url = 's3:///{0}'.format(archive)
+    fetcher = Archived_S3FS(url=url)
+    with spack_stage.Stage(fetcher, path=testpath):
+        fetcher.fetch()