diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 75ddca1abce62a737c30d41e19dc2a70c8e955ff..965e3a7f786b7d6769fdc1fbd6d14d04fa980440 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -48,6 +48,10 @@
 stage_path     = join_path(var_path, "stage")
 repos_path     = join_path(var_path, "repos")
 share_path     = join_path(spack_root, "share", "spack")
+cache_path     = join_path(var_path, "cache")
+
+import spack.fetch_strategy
+cache = spack.fetch_strategy.FsCache(cache_path)
 
 prefix = spack_root
 opt_path       = join_path(prefix, "opt")
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index cb9dd26c71752b1133ec1999ed513f538ed0e305..36810321ef7905abcb01090d2102f9eabf29ab0e 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -31,6 +31,7 @@
 
 import spack
 import spack.test
+from spack.fetch_strategy import FetchError
 
 description ="Run unit tests"
 
@@ -50,6 +51,24 @@ def setup_parser(subparser):
         help="verbose output")
 
 
+class MockCache(object):
+    def store(self, fetcher, relativeDst):
+        pass
+
+    def fetcher(self, targetPath, digest):
+        return MockCacheFetcher()
+
+
+class MockCacheFetcher(object):
+    def set_stage(self, stage):
+        pass
+
+    def fetch(self):
+        raise FetchError("Mock cache always fails for tests")
+
+    def __str__(self):
+        return "[mock fetcher]"
+
 def test(parser, args):
     if args.list:
         print "Available tests:"
@@ -66,4 +85,5 @@ def test(parser, args):
             
             if not os.path.exists(outputDir):
                 mkdirp(outputDir)
+        spack.cache = MockCache()
         spack.test.run(args.names, outputDir, args.verbose)
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 1953d7c1b3856ab12e1a0e8f46db1991f319547a..6f28ec34b25b42026667c954cbb49116c59532b9 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -310,7 +310,7 @@ def archive(self, destination):
         if not extension(destination) == extension(self.archive_file):
             raise ValueError("Cannot archive without matching extensions.")
 
-        shutil.move(self.archive_file, destination)
+        shutil.copy(self.archive_file, destination)
 
     @_needs_stage
     def check(self):
@@ -348,7 +348,7 @@ def reset(self):
 
     def __repr__(self):
         url = self.url if self.url else "no url"
-        return "URLFetchStrategy<%s>" % url
+        return "%s<%s>" % (self.__class__.__name__, url)
 
     def __str__(self):
         if self.url:
@@ -357,6 +357,24 @@ def __str__(self):
             return "[no url]"
 
 
+class CacheURLFetchStrategy(URLFetchStrategy):
+    """The resource associated with a cache URL may be out of date."""
+    def __init__(self, *args, **kwargs):
+        super(CacheURLFetchStrategy, self).__init__(*args, **kwargs)
+
+    @_needs_stage
+    def fetch(self):
+        super(CacheURLFetchStrategy, self).fetch()
+        if self.digest:
+            try:
+                self.check()
+            except ChecksumError:
+                # Delete the corrupt archive: later fetch attempts skip the
+                # download whenever the archive file is already present.
+                os.remove(self.archive_file)
+                raise
+
+
 class VCSFetchStrategy(FetchStrategy):
 
     def __init__(self, name, *rev_types, **kwargs):
@@ -815,6 +833,32 @@ def for_package_version(pkg, version):
     raise InvalidArgsError(pkg, version)
 
 
+class FsCache(object):
+    def __init__(self, root):
+        self.root = os.path.abspath(root)
+
+    def store(self, fetcher, relativeDst):
+        unique = False
+        uidGroups = [['tag', 'commit'], ['digest'], ['revision']]
+        for grp in uidGroups:
+            try:
+                unique |= any(getattr(fetcher, x) for x in grp)
+            except AttributeError:
+                pass
+            if unique:
+                break
+        if not unique:
+            return
+
+        dst = join_path(self.root, relativeDst)
+        mkdirp(os.path.dirname(dst))
+        fetcher.archive(dst)
+        
+    def fetcher(self, targetPath, digest):
+        url = "file://" + join_path(self.root, targetPath)
+        return CacheURLFetchStrategy(url, digest)
+
+
 class FetchError(spack.error.SpackError):
 
     def __init__(self, msg, long_msg=None):
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 6a61b3d52b99df3e29e2c759b5381bd841c40d69..b73056fd303dc20014fb3b70b93bc0cbcd865ce8 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -748,6 +748,9 @@ def do_fetch(self, mirror_only=False):
         if spack.do_checksum and self.version in self.versions:
             self.stage.check()
 
+        self.stage.cache_local()
+
+
     def do_stage(self, mirror_only=False):
         """Unpacks the fetched tarball, then changes into the expanded tarball
            directory."""
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index a76ec168ad73f230df82468d058b9f1cd1a87a92..b08cce43b8bb2ff5567792f2792af713477a3e3f 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -304,6 +304,7 @@ def fetch(self, mirror_only=False):
             # Add URL strategies for all the mirrors with the digest
             for url in urls:
                 fetchers.insert(0, fs.URLFetchStrategy(url, digest))
+            fetchers.insert(0, spack.cache.fetcher(self.mirror_path, digest))
 
         for fetcher in fetchers:
             try:
@@ -320,6 +321,7 @@ def fetch(self, mirror_only=False):
             self.fetcher = self.default_fetcher
             raise fs.FetchError(errMessage, None)
 
+
     def check(self):
         """Check the downloaded archive against a checksum digest.
            No-op if this stage checks code out of a repository."""
@@ -333,6 +335,11 @@ def check(self):
         else:
             self.fetcher.check()
 
+
+    def cache_local(self):
+        spack.cache.store(self.fetcher, self.mirror_path)
+
+
     def expand_archive(self):
         """Changes to the stage directory and attempt to expand the downloaded
            archive.  Fail if the stage is not set up or if the archive is not yet
@@ -436,7 +443,7 @@ def expand_archive(self):
                 shutil.move(source_path, destination_path)
 
 
-@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive',  'restage', 'destroy'])
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive',  'restage', 'destroy', 'cache_local'])
 class StageComposite:
     """
     Composite for Stage type objects. The first item in this composite is considered to be the root package, and
@@ -511,6 +518,9 @@ def destroy(self):
         # No need to destroy DIY stage.
         pass
 
+    def cache_local(self):
+        tty.msg("Sources for DIY stages are not cached")
+
 
 def _get_mirrors():
     """Get mirrors from spack configuration."""