Skip to content

Commit

Permalink
Fix fetching non-expanded resources from mirrors (#1310)
Browse files Browse the repository at this point in the history
This closes #1308, where fetching a non-expanded resource from a mirror
would cause an error.

This also ensures that when a URL resource is fetched from a mirror,
that it will be named as though it were retrieved from the original
URL. This is particularly useful for non-expanded resources since it
ensures that the resource name is consistent for the installation
(this is less important for expanded resources because the build takes
place inside the expanded resource).
  • Loading branch information
scheibelp authored and tgamblin committed Sep 7, 2016
1 parent d55b17d commit fd02a14
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 22 deletions.
2 changes: 1 addition & 1 deletion lib/spack/spack/cmd/test.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ class MockCache(object):
def store(self, copyCmd, relativeDst):
    """No-op stand-in: the mock cache silently discards store requests."""
    return None

def fetcher(self, targetPath, digest, **kwargs):
    """Return a mock fetcher, ignoring the target path and digest.

    Accepts arbitrary keyword arguments so the mock's signature stays
    compatible with the real cache's fetcher() (which now receives
    extra options such as ``expand``) instead of raising TypeError.
    """
    return MockCacheFetcher()


Expand Down
11 changes: 5 additions & 6 deletions lib/spack/spack/fetch_strategy.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,12 +170,11 @@ def fetch(self):
tty.msg("Already downloaded %s" % self.archive_file)
return

possible_files = self.stage.expected_archive_files
save_file = None
partial_file = None
if possible_files:
save_file = self.stage.expected_archive_files[0]
partial_file = self.stage.expected_archive_files[0] + '.part'
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'

tty.msg("Trying to fetch from %s" % self.url)

Expand Down Expand Up @@ -858,9 +857,9 @@ def store(self, fetcher, relativeDst):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)

def fetcher(self, targetPath, digest, **kwargs):
    """Build a fetch strategy for an archive stored in this cache.

    Extra keyword arguments (e.g. ``expand=False`` for non-expanded
    resources) are forwarded to CacheURLFetchStrategy so a cached
    fetch behaves exactly like a fetch from the original URL.
    """
    url = "file://" + join_path(self.root, targetPath)
    return CacheURLFetchStrategy(url, digest, **kwargs)

def destroy(self):
    """Remove the entire cache tree; a missing directory is tolerated."""
    cache_root = self.root
    shutil.rmtree(cache_root, ignore_errors=True)
Expand Down
34 changes: 19 additions & 15 deletions lib/spack/spack/stage.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,29 +216,29 @@ def _need_to_create_path(self):
def expected_archive_files(self):
    """Possible archive file paths for this stage.

    The name derived from the default fetcher's URL comes first so
    that resources fetched from a mirror are saved under the same
    name they would have had from the original URL.
    """
    # NOTE(review): the scraped diff interleaved the removed
    # ``self.fetcher`` lines with the kept ``self.default_fetcher``
    # ones; this is the post-fix version. Using default_fetcher keeps
    # the expected name stable even while a mirror fetcher is active.
    paths = []
    if isinstance(self.default_fetcher, fs.URLFetchStrategy):
        paths.append(os.path.join(
            self.path, os.path.basename(self.default_fetcher.url)))

    if self.mirror_path:
        paths.append(os.path.join(
            self.path, os.path.basename(self.mirror_path)))

    return paths

@property
def save_filename(self):
    """Name under which a fetched resource should be saved.

    Prefers the name derived from the default fetcher's URL (the
    first expected archive file) so the local file name matches the
    remote resource name; returns None when nothing is expected.
    """
    candidates = self.expected_archive_files
    return candidates[0] if candidates else None

@property
def archive_file(self):
"""Path to the source archive within this stage directory."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
paths.append(os.path.join(
self.path, os.path.basename(self.fetcher.url)))

if self.mirror_path:
paths.append(os.path.join(
self.path, os.path.basename(self.mirror_path)))

for path in paths:
for path in self.expected_archive_files:
if os.path.exists(path):
return path
else:
Expand Down Expand Up @@ -301,18 +301,22 @@ def fetch(self, mirror_only=False):
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
expand = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
expand = self.default_fetcher.expand_archive

# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
self.skip_checksum_for_mirror = not bool(digest)

# Add URL strategies for all the mirrors with the digest
for url in urls:
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path,
digest))
fetchers.insert(
0, fs.URLFetchStrategy(url, digest, expand=expand))
fetchers.insert(
0, spack.fetch_cache.fetcher(
self.mirror_path, digest, expand=expand))

# Look for the archive in list_url
package_name = os.path.dirname(self.mirror_path)
Expand Down

0 comments on commit fd02a14

Please sign in to comment.