From edf9548310b59bf7c52d75f02b619520bba467ad Mon Sep 17 00:00:00 2001
From: Omar Padron <omar.padron@kitware.com>
Date: Sun, 3 Nov 2019 20:11:30 -0500
Subject: [PATCH] bugfix: fetch prefers to fetch local mirrors over remote
 resources (#13545)

- [x] insert at the beginning of the list so that fetch grabs local mirrors before remote resources
- [x] update the S3FetchStrategy so that it throws a SpackError if the fetch fails.
      Before, it was throwing URLError, which was not being caught in stage.py.
- [x] move error handling out of S3FetchStrategy and into web_util.read_from_url()
- [x] pass a string instead of a URLError to SpackWebError
---
 lib/spack/spack/fetch_strategy.py | 4 ++--
 lib/spack/spack/stage.py          | 8 +++-----
 lib/spack/spack/util/web.py       | 7 ++++++-
 3 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 393e3af9d1..5a57703d27 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1126,7 +1126,7 @@ def fetch(self):
 
         parsed_url = url_util.parse(self.url)
         if parsed_url.scheme != 's3':
-            raise ValueError(
+            raise FetchError(
                 'S3FetchStrategy can only fetch from s3:// urls.')
 
         tty.msg("Fetching %s" % self.url)
@@ -1392,7 +1392,7 @@ class NoCacheError(FetchError):
 
 
 class FailedDownloadError(FetchError):
-    """Raised wen a download fails."""
+    """Raised when a download fails."""
     def __init__(self, url, msg=""):
         super(FailedDownloadError, self).__init__(
             "Failed to fetch file from URL: %s" % url, msg)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 7869c5f863..d2dd3e6e7a 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -433,11 +433,9 @@ def fetch(self, mirror_only=False):
 
             # Add URL strategies for all the mirrors with the digest
             for url in urls:
-                fetchers.append(fs.from_url_scheme(
-                    url, digest, expand=expand, extension=extension))
-                # fetchers.insert(
-                #     0, fs.URLFetchStrategy(
-                #         url, digest, expand=expand, extension=extension))
+                fetchers.insert(
+                    0, fs.from_url_scheme(
+                        url, digest, expand=expand, extension=extension))
 
             if self.default_fetcher.cachable:
                 for rel_path in reversed(list(self.mirror_paths)):
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index f2afe769c6..1fe58d6415 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -177,7 +177,12 @@ def read_from_url(url, accept_content_type=None):
 
     # Do the real GET request when we know it's just HTML.
     req.get_method = lambda: "GET"
-    response = _urlopen(req, timeout=_timeout, context=context)
+
+    try:
+        response = _urlopen(req, timeout=_timeout, context=context)
+    except URLError as err:
+        raise SpackWebError('Download failed: {ERROR}'.format(
+            ERROR=str(err)))
 
     if accept_content_type and not is_web_url:
         content_type = response.headers.get('Content-type')
-- 
GitLab