diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index 383809295a866..c02a21d4ec0cc 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -141,6 +141,9 @@ def msg(message, *args, **kwargs): if not msg_enabled(): return + if isinstance(message, Exception): + message = "%s: %s" % (message.__class__.__name__, str(message)) + newline = kwargs.get('newline', True) st_text = "" if _stacktrace: @@ -156,6 +159,9 @@ def msg(message, *args, **kwargs): def info(message, *args, **kwargs): + if isinstance(message, Exception): + message = "%s: %s" % (message.__class__.__name__, str(message)) + format = kwargs.get('format', '*b') stream = kwargs.get('stream', sys.stdout) wrap = kwargs.get('wrap', False) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 602e9605dedfa..bae8edd884540 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -321,7 +321,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, # create info for later relocation and create tar write_buildinfo_file(spec.prefix, workdir, rel=rel) - # optinally make the paths in the binaries relative to each other + # optionally make the paths in the binaries relative to each other # in the spack install tree before creating tarball if rel: try: @@ -329,14 +329,14 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False, except Exception as e: shutil.rmtree(workdir) shutil.rmtree(tarfile_dir) - tty.die(str(e)) + tty.die(e) else: try: make_package_placeholder(workdir, spec.prefix, allow_root) except Exception as e: shutil.rmtree(workdir) shutil.rmtree(tarfile_dir) - tty.die(str(e)) + tty.die(e) # create compressed tarball of the install prefix with closing(tarfile.open(tarfile_path, 'w:gz')) as tar: tar.add(name='%s' % workdir, @@ -521,7 +521,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, 
Gpg.verify('%s.asc' % specfile_path, specfile_path) except Exception as e: shutil.rmtree(tmpdir) - tty.die(str(e)) + tty.die(e) else: shutil.rmtree(tmpdir) raise NoVerifyException( @@ -575,7 +575,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, relocate_package(workdir, allow_root) except Exception as e: shutil.rmtree(workdir) - tty.die(str(e)) + tty.die(e) # Delay creating spec.prefix until verification is complete # and any relocation has been done. else: @@ -809,7 +809,8 @@ def _download_buildcache_entry(mirror_root, descriptions): try: stage.fetch() - except fs.FetchError: + except fs.FetchError as e: + tty.debug(e) if fail_if_missing: tty.error('Failed to download required url {0}'.format(url)) return False diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py index 3673fb7cce460..cd6cfd94bdfc6 100644 --- a/lib/spack/spack/build_systems/autotools.py +++ b/lib/spack/spack/build_systems/autotools.py @@ -113,8 +113,8 @@ def _do_patch_config_guess(self): check_call([my_config_guess], stdout=PIPE, stderr=PIPE) # The package's config.guess already runs OK, so just use it return - except Exception: - pass + except Exception as e: + tty.debug(e) else: return @@ -142,8 +142,8 @@ def _do_patch_config_guess(self): os.chmod(my_config_guess, mod) shutil.copyfile(config_guess, my_config_guess) return - except Exception: - pass + except Exception as e: + tty.debug(e) raise RuntimeError('Failed to find suitable config.guess') diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 91618fcfd952b..9fd7c5b2d3d7a 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -262,6 +262,7 @@ def install(parser, args, **kwargs): specs = spack.cmd.parse_specs( args.package, concretize=True, tests=tests) except SpackError as e: + tty.debug(e) reporter.concretization_report(e.message) raise diff --git a/lib/spack/spack/cmd/location.py 
b/lib/spack/spack/cmd/location.py index f5206efa1cf4c..38d1eab174955 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -107,7 +107,7 @@ def location(parser, args): print(pkg.stage.path) else: # args.build_dir is the default. - if not pkg.stage.source_path: + if not pkg.stage.expanded: tty.die("Build directory does not exist yet. " "Run this to create it:", "spack stage " + " ".join(args.spec)) diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 126347285f72d..e6438a28f4831 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -142,6 +142,7 @@ def _read_specs_from_file(filename): s.package specs.append(s) except SpackError as e: + tty.debug(e) tty.die("Parse error in %s, line %d:" % (filename, i + 1), ">>> " + string, str(e)) return specs diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py index 7fadd73ee0739..cb3d8ebd07ba5 100644 --- a/lib/spack/spack/cmd/modules/__init__.py +++ b/lib/spack/spack/cmd/modules/__init__.py @@ -304,6 +304,7 @@ def refresh(module_type, specs, args): try: x.write(overwrite=True) except Exception as e: + tty.debug(e) msg = 'Could not write module file [{0}]' tty.warn(msg.format(x.layout.filename)) tty.warn('\t--> {0} <--'.format(str(e))) diff --git a/lib/spack/spack/cmd/upload_s3.py b/lib/spack/spack/cmd/upload_s3.py index ba3f0688ae47c..9894341efec14 100644 --- a/lib/spack/spack/cmd/upload_s3.py +++ b/lib/spack/spack/cmd/upload_s3.py @@ -158,7 +158,8 @@ def upload_spec(args): try: spec = Spec(args.spec) spec.concretize() - except Exception: + except Exception as e: + tty.debug(e) tty.error('Unable to concrectize spec from string {0}'.format( args.spec)) sys.exit(1) @@ -166,7 +167,8 @@ def upload_spec(args): try: with open(args.spec_yaml, 'r') as fd: spec = Spec.from_yaml(fd.read()) - except Exception: + except Exception as e: + tty.debug(e) tty.error('Unable to concrectize spec from yaml {0}'.format( 
args.spec_yaml)) sys.exit(1) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index b7adae18fbbfa..ad6475b02637d 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -606,8 +606,7 @@ def _construct_from_directory_layout(self, directory_layout, old_data): except Exception as e: # Something went wrong, so the spec was not restored # from old data - tty.debug(e.message) - pass + tty.debug(e) self._check_ref_counts() @@ -659,7 +658,8 @@ def _write(self, type, value, traceback): with open(temp_file, 'w') as f: self._write_to_file(f) os.rename(temp_file, self._index_path) - except BaseException: + except BaseException as e: + tty.debug(e) # Clean up temp file if something goes wrong. if os.path.exists(temp_file): os.remove(temp_file) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index dd7ebd248b5ea..9ab9af3b7e703 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -60,6 +60,13 @@ def wrapper(self, *args, **kwargs): return wrapper +def _ensure_one_stage_entry(stage_path): + """Ensure there is only one stage entry in the stage path.""" + stage_entries = os.listdir(stage_path) + assert len(stage_entries) == 1 + return os.path.join(stage_path, stage_entries[0]) + + class FSMeta(type): """This metaclass registers all fetch strategies in a list.""" def __init__(cls, name, bases, dict): @@ -302,6 +309,7 @@ def fetch(self): raise FailedDownloadError(self.url) @property + @_needs_stage def archive_file(self): """Path to the source archive within this stage directory.""" return self.stage.archive_file @@ -313,7 +321,13 @@ def cachable(self): @_needs_stage def expand(self): if not self.expand_archive: - tty.msg("Skipping expand step for %s" % self.archive_file) + tty.msg("Staging unexpanded archive %s in %s" % ( + self.archive_file, self.stage.source_path)) + if not self.stage.expanded: + mkdirp(self.stage.source_path) + dest = 
os.path.join(self.stage.source_path, + os.path.basename(self.archive_file)) + shutil.move(self.archive_file, dest) return tty.msg("Staging archive: %s" % self.archive_file) @@ -326,6 +340,10 @@ def expand(self): if not self.extension: self.extension = extension(self.archive_file) + if self.stage.expanded: + tty.debug('Source already staged to %s' % self.stage.source_path) + return + decompress = decompressor_for(self.archive_file, self.extension) # Expand all tarballs in their own directory to contain @@ -337,26 +355,37 @@ def expand(self): with working_dir(tarball_container): decompress(self.archive_file) - # Check for an exploding tarball, i.e. one that doesn't expand - # to a single directory. If the tarball *didn't* explode, - # move contents up & remove the container directory. + # Check for an exploding tarball, i.e. one that doesn't expand to + # a single directory. If the tarball *didn't* explode, move its + # contents to the staging source directory & remove the container + # directory. If the tarball did explode, just rename the tarball + # directory to the staging source directory. # - # NOTE: The tar program on Mac OS X will encode HFS metadata - # in hidden files, which can end up *alongside* a single - # top-level directory. We ignore hidden files to accomodate - # these "semi-exploding" tarballs. + # NOTE: The tar program on Mac OS X will encode HFS metadata in + # hidden files, which can end up *alongside* a single top-level + # directory. We initially ignore presence of hidden files to + # accommodate these "semi-exploding" tarballs but ensure the files + # are copied to the source directory. 
files = os.listdir(tarball_container) non_hidden = [f for f in files if not f.startswith('.')] if len(non_hidden) == 1: - expanded_dir = os.path.join(tarball_container, non_hidden[0]) - if os.path.isdir(expanded_dir): - for f in files: - shutil.move(os.path.join(tarball_container, f), - os.path.join(self.stage.path, f)) + src = os.path.join(tarball_container, non_hidden[0]) + if os.path.isdir(src): + shutil.move(src, self.stage.source_path) + if len(files) > 1: + files.remove(non_hidden[0]) + for f in files: + src = os.path.join(tarball_container, f) + dest = os.path.join(self.stage.path, f) + shutil.move(src, dest) os.rmdir(tarball_container) + else: + # This is a non-directory entry (e.g., a patch file) so simply + # rename the tarball container to be the source path. + shutil.move(tarball_container, self.stage.source_path) - if not files: - os.rmdir(tarball_container) + else: + shutil.move(tarball_container, self.stage.source_path) def archive(self, destination): """Just moves this archive to the destination.""" @@ -390,7 +419,7 @@ def reset(self): "Tried to reset URLFetchStrategy before fetching", "Failed on reset() for URL %s" % self.url) - # Remove everythigng but the archive from the stage + # Remove everything but the archive from the stage for filename in os.listdir(self.stage.path): abspath = os.path.join(self.stage.path, filename) if abspath != self.archive_file: @@ -549,6 +578,15 @@ def fetch(self): def archive(self, destination): super(GoFetchStrategy, self).archive(destination, exclude='.git') + @_needs_stage + def expand(self): + tty.debug( + "Source fetched with %s is already expanded." 
% self.url_attr) + + # Move the directory to the well-known stage source path + repo_root = _ensure_one_stage_entry(self.stage.path) + shutil.move(repo_root, self.stage.source_path) + @_needs_stage def reset(self): with working_dir(self.stage.source_path): @@ -634,8 +672,9 @@ def _repo_info(self): return '{0}{1}'.format(self.url, args) + @_needs_stage def fetch(self): - if self.stage.source_path: + if self.stage.expanded: tty.msg("Already fetched {0}".format(self.stage.source_path)) return @@ -645,17 +684,16 @@ def fetch(self): if self.commit: # Need to do a regular clone and check out everything if # they asked for a particular commit. - with working_dir(self.stage.path): - if spack.config.get('config:debug'): - git('clone', self.url) - else: - git('clone', '--quiet', self.url) + args = ['clone', self.url, self.stage.source_path] + if not spack.config.get('config:debug'): + args.insert(1, '--quiet') + git(*args) with working_dir(self.stage.source_path): - if spack.config.get('config:debug'): - git('checkout', self.commit) - else: - git('checkout', '--quiet', self.commit) + args = ['checkout', self.commit] + if not spack.config.get('config:debug'): + args.insert(1, '--quiet') + git(*args) else: # Can be more efficient if not checking out a specific commit. @@ -674,45 +712,46 @@ def fetch(self): if self.git_version > ver('1.7.10'): args.append('--single-branch') - with working_dir(self.stage.path): - cloned = False - # Yet more efficiency, only download a 1-commit deep tree - if self.git_version >= ver('1.7.1'): - try: - git(*(args + ['--depth', '1', self.url])) - cloned = True - except spack.error.SpackError: - # This will fail with the dumb HTTP transport - # continue and try without depth, cleanup first - pass - - if not cloned: - args.append(self.url) - git(*args) - - with working_dir(self.stage.source_path): - # For tags, be conservative and check them out AFTER - # cloning. Later git versions can do this with clone - # --branch, but older ones fail. 
- if self.tag and self.git_version < ver('1.8.5.2'): - # pull --tags returns a "special" error code of 1 in - # older versions that we have to ignore. - # see: https://github.com/git/git/commit/19d122b - if spack.config.get('config:debug'): - git('pull', '--tags', ignore_errors=1) - git('checkout', self.tag) - else: - git('pull', '--quiet', '--tags', ignore_errors=1) - git('checkout', '--quiet', self.tag) + cloned = False + # Yet more efficiency, only download a 1-commit deep tree + if self.git_version >= ver('1.7.1'): + try: + git(*(args + ['--depth', '1', self.url, + self.stage.source_path])) + cloned = True + except spack.error.SpackError as e: + # This will fail with the dumb HTTP transport + # continue and try without depth, cleanup first + tty.debug(e) + + if not cloned: + args.extend([self.url, self.stage.source_path]) + git(*args) - with working_dir(self.stage.source_path): - # Init submodules if the user asked for them. - if self.submodules: - if spack.config.get('config:debug'): - git('submodule', 'update', '--init', '--recursive') - else: - git('submodule', '--quiet', 'update', '--init', - '--recursive') + with working_dir(self.stage.source_path): + # For tags, be conservative and check them out AFTER + # cloning. Later git versions can do this with clone + # --branch, but older ones fail. + if self.tag and self.git_version < ver('1.8.5.2'): + # pull --tags returns a "special" error code of 1 in + # older versions that we have to ignore. + # see: https://github.com/git/git/commit/19d122b + pull_args = ['pull', '--tags'] + co_args = ['checkout', self.tag] + if not spack.config.get('config:debug'): + pull_args.insert(1, '--quiet') + co_args.insert(1, '--quiet') + + git(*pull_args, ignore_errors=1) + git(*co_args) + + # Init submodules if the user asked for them. 
+ if self.submodules: + with working_dir(self.stage.source_path): + args = ['submodule', 'update', '--init', '--recursive'] + if not spack.config.get('config:debug'): + args.insert(1, '--quiet') + git(*args) def archive(self, destination): super(GitFetchStrategy, self).archive(destination, exclude='.git') @@ -720,12 +759,14 @@ def archive(self, destination): @_needs_stage def reset(self): with working_dir(self.stage.source_path): + co_args = ['checkout', '.'] + clean_args = ['clean', '-f'] if spack.config.get('config:debug'): - self.git('checkout', '.') - self.git('clean', '-f') - else: - self.git('checkout', '--quiet', '.') - self.git('clean', '--quiet', '-f') + co_args.insert(1, '--quiet') + clean_args.insert(1, '--quiet') + + self.git(*co_args) + self.git(*clean_args) def __str__(self): return '[git] {0}'.format(self._repo_info()) @@ -778,7 +819,7 @@ def get_source_id(self): @_needs_stage def fetch(self): - if self.stage.source_path: + if self.stage.expanded: tty.msg("Already fetched %s" % self.stage.source_path) return @@ -787,10 +828,8 @@ def fetch(self): args = ['checkout', '--force', '--quiet'] if self.revision: args += ['-r', self.revision] - args.append(self.url) - - with working_dir(self.stage.path): - self.svn(*args) + args.extend([self.url, self.stage.source_path]) + self.svn(*args) def _remove_untracked_files(self): """Removes untracked files in an svn repository.""" @@ -881,7 +920,7 @@ def get_source_id(self): @_needs_stage def fetch(self): - if self.stage.source_path: + if self.stage.expanded: tty.msg("Already fetched %s" % self.stage.source_path) return @@ -895,13 +934,11 @@ def fetch(self): if not spack.config.get('config:verify_ssl'): args.append('--insecure') - args.append(self.url) - if self.revision: args.extend(['-r', self.revision]) - with working_dir(self.stage.path): - self.hg(*args) + args.extend([self.url, self.stage.source_path]) + self.hg(*args) def archive(self, destination): super(HgFetchStrategy, self).archive(destination, 
exclude='.hg') @@ -1082,11 +1119,13 @@ def from_list_url(pkg): # construct a fetcher return URLFetchStrategy(url_from_list, checksum) - except KeyError: + except KeyError as e: + tty.debug(e) tty.msg("Cannot find version %s in url_list" % pkg.version) - except BaseException: + except BaseException as e: # TODO: Don't catch BaseException here! Be more specific. + tty.debug(e) tty.msg("Could not determine url from list_url.") diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 9b2e5f1bc6843..9c84c2fb197f6 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -505,6 +505,7 @@ def __call__(self, *argv, **kwargs): self.returncode = e.code except BaseException as e: + tty.debug(e) self.error = e if fail_on_error: raise @@ -695,12 +696,13 @@ def main(argv=None): return _invoke_command(command, parser, args, unknown) except SpackError as e: + tty.debug(e) e.die() # gracefully die on any SpackErrors except Exception as e: if spack.config.get('config:debug'): raise - tty.die(str(e)) + tty.die(e) except KeyboardInterrupt: sys.stderr.write('\n') diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index f8e795ed56cbd..6d5b2b6c4f719 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -218,6 +218,7 @@ def add_single_spec(spec, mirror_root, categories, **kwargs): spec.package.do_clean() except Exception as e: + tty.debug(e) if spack.config.get('config:debug'): sys.excepthook(*sys.exc_info()) else: diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 226e0d16438a6..4cca72c6cfbf7 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1067,7 +1067,9 @@ def do_patch(self): patch.apply(self.stage) tty.msg('Applied patch %s' % patch.path_or_url) patched = True - except spack.error.SpackError: + except spack.error.SpackError as e: + tty.debug(e) + # Touch bad file if anything goes wrong. tty.msg('Patch %s failed.' 
% patch.path_or_url) touch(bad_file) @@ -1088,7 +1090,10 @@ def do_patch(self): # no patches are needed. Otherwise, we already # printed a message for each patch. tty.msg("No patches needed for %s" % self.name) - except spack.error.SpackError: + except spack.error.SpackError as e: + tty.debug(e) + + # Touch bad file if anything goes wrong. tty.msg("patch() function failed for %s" % self.name) touch(bad_file) raise @@ -1724,9 +1729,9 @@ def log(self): try: # log_install_path and env_install_path are inside this shutil.rmtree(packages_dir) - except Exception: + except Exception as e: # FIXME : this potentially catches too many things... - pass + tty.debug(e) # Archive the whole stdout + stderr for the package install(self.log_path, log_install_path) @@ -1762,7 +1767,9 @@ def log(self): # copying a file in mkdirp(os.path.dirname(target)) install(f, target) - except Exception: + except Exception as e: + tty.debug(e) + # Here try to be conservative, and avoid discarding # the whole install procedure because of copying a # single file failed diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index 7a2e235a4e642..1e593634b9622 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -893,8 +893,10 @@ def get(self, spec): except spack.error.SpackError: # pass these through as their error messages will be fine. 
raise - except Exception: - # make sure other errors in constructors hit the error + except Exception as e: + tty.debug(e) + + # Make sure other errors in constructors hit the error # handler by wrapping them if spack.config.get('config:debug'): sys.excepthook(*sys.exc_info()) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 0412465ba076b..c08d074415894 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1723,6 +1723,7 @@ def from_json(stream): data = sjson.load(stream) return Spec.from_dict(data) except Exception as e: + tty.debug(e) raise sjson.SpackJSONError("error parsing JSON spec:", str(e)) def _concretize_helper(self, presets=None, visited=None): diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index fb0df21e6f4c3..c36d9ac89c243 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -28,6 +28,7 @@ from spack.util.path import canonicalize_path from spack.util.crypto import prefix_bits, bit_length +_source_path_subdir = 'src' _stage_prefix = 'spack-stage-' @@ -46,8 +47,9 @@ def _first_accessible_path(paths): # return it if successful. return path - except OSError: - tty.debug('OSError while checking temporary path: %s' % path) + except OSError as e: + tty.debug('OSError while checking temporary path %s: %s' % ( + path, str(e))) continue return None @@ -78,7 +80,7 @@ def get_tmp_root(): _use_tmp_stage = False return None - # ensure that any temp path is unique per user, so users don't + # Ensure that any temp path is unique per user, so users don't # fight over shared temporary space. 
user = getpass.getuser() if user not in path: @@ -292,13 +294,18 @@ def _need_to_create_path(self): def expected_archive_files(self): """Possible archive file paths.""" paths = [] - if isinstance(self.default_fetcher, fs.URLFetchStrategy): - paths.append(os.path.join( - self.path, os.path.basename(self.default_fetcher.url))) + roots = [self.path] + if self.expanded: + roots.insert(0, self.source_path) - if self.mirror_path: - paths.append(os.path.join( - self.path, os.path.basename(self.mirror_path))) + for path in roots: + if isinstance(self.default_fetcher, fs.URLFetchStrategy): + paths.append(os.path.join( + path, os.path.basename(self.default_fetcher.url))) + + if self.mirror_path: + paths.append(os.path.join( + path, os.path.basename(self.mirror_path))) return paths @@ -321,27 +328,14 @@ def archive_file(self): return None @property - def source_path(self): - """Returns the path to the expanded/checked out source code. - - To find the source code, this method searches for the first - subdirectory of the stage that it can find, and returns it. - This assumes nothing besides the archive file will be in the - stage path, but it has the advantage that we don't need to - know the name of the archive or its contents. - - If the fetch strategy is not supposed to expand the downloaded - file, it will just return the stage path. If the archive needs - to be expanded, it will return None when no archive is found. 
- """ - if isinstance(self.fetcher, fs.URLFetchStrategy): - if not self.fetcher.expand_archive: - return self.path + def expanded(self): + """Returns True if source path expanded; else False.""" + return os.path.exists(self.source_path) - for p in [os.path.join(self.path, f) for f in os.listdir(self.path)]: - if os.path.isdir(p): - return p - return None + @property + def source_path(self): + """Returns the well-known source directory path.""" + return os.path.join(self.path, _source_path_subdir) def fetch(self, mirror_only=False): """Downloads an archive or checks out code from a repository.""" @@ -441,8 +435,7 @@ def expand_archive(self): """Changes to the stage directory and attempt to expand the downloaded archive. Fail if the stage is not set up or if the archive is not yet downloaded.""" - archive_dir = self.source_path - if not archive_dir: + if not self.expanded: self.fetcher.expand() tty.msg("Created stage in %s" % self.path) else: @@ -490,7 +483,8 @@ def destroy(self): # Make sure we don't end up in a removed directory try: os.getcwd() - except OSError: + except OSError as e: + tty.debug(e) os.chdir(os.path.dirname(self.path)) # mark as destroyed @@ -530,6 +524,7 @@ def _add_to_root_stage(self): try: os.makedirs(target_path) except OSError as err: + tty.debug(err) if err.errno == errno.EEXIST and os.path.isdir(target_path): pass else: @@ -633,12 +628,6 @@ def cache_local(self): tty.msg("Sources for DIY stages are not cached") -def _get_mirrors(): - """Get mirrors from spack configuration.""" - config = spack.config.get('mirrors') - return [val for name, val in iteritems(config)] - - def ensure_access(file=spack.paths.stage_path): """Ensure we can access a directory and die with an error if we can't.""" if not can_access(file): diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index c628d7e3ae147..3ff1b1aaedbe1 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -519,12 +519,12 @@ def 
mock_archive(tmpdir_factory): tar = spack.util.executable.which('tar', required=True) tmpdir = tmpdir_factory.mktemp('mock-archive-dir') - expanded_archive_basedir = 'mock-archive-repo' - tmpdir.ensure(expanded_archive_basedir, dir=True) - repodir = tmpdir.join(expanded_archive_basedir) + tmpdir.ensure(spack.stage._source_path_subdir, dir=True) + repodir = tmpdir.join(spack.stage._source_path_subdir) # Create the configure script - configure_path = str(tmpdir.join(expanded_archive_basedir, 'configure')) + configure_path = str(tmpdir.join(spack.stage._source_path_subdir, + 'configure')) with open(configure_path, 'w') as f: f.write( "#!/bin/sh\n" @@ -541,8 +541,8 @@ def mock_archive(tmpdir_factory): # Archive it with tmpdir.as_cwd(): - archive_name = '{0}.tar.gz'.format(expanded_archive_basedir) - tar('-czf', archive_name, expanded_archive_basedir) + archive_name = '{0}.tar.gz'.format(spack.stage._source_path_subdir) + tar('-czf', archive_name, spack.stage._source_path_subdir) Archive = collections.namedtuple('Archive', ['url', 'path', 'archive_file', @@ -554,7 +554,7 @@ def mock_archive(tmpdir_factory): url=('file://' + archive_file), archive_file=archive_file, path=str(repodir), - expanded_archive_basedir=expanded_archive_basedir) + expanded_archive_basedir=spack.stage._source_path_subdir) @pytest.fixture(scope='session') @@ -565,9 +565,8 @@ def mock_git_repository(tmpdir_factory): git = spack.util.executable.which('git', required=True) tmpdir = tmpdir_factory.mktemp('mock-git-repo-dir') - expanded_archive_basedir = 'mock-git-repo' - tmpdir.ensure(expanded_archive_basedir, dir=True) - repodir = tmpdir.join(expanded_archive_basedir) + tmpdir.ensure(spack.stage._source_path_subdir, dir=True) + repodir = tmpdir.join(spack.stage._source_path_subdir) # Initialize the repository with repodir.as_cwd(): @@ -639,9 +638,8 @@ def mock_hg_repository(tmpdir_factory): hg = spack.util.executable.which('hg', required=True) tmpdir = tmpdir_factory.mktemp('mock-hg-repo-dir') - 
expanded_archive_basedir = 'mock-hg-repo' - tmpdir.ensure(expanded_archive_basedir, dir=True) - repodir = tmpdir.join(expanded_archive_basedir) + tmpdir.ensure(spack.stage._source_path_subdir, dir=True) + repodir = tmpdir.join(spack.stage._source_path_subdir) get_rev = lambda: hg('id', '-i', output=str).strip() @@ -685,9 +683,8 @@ def mock_svn_repository(tmpdir_factory): svnadmin = spack.util.executable.which('svnadmin', required=True) tmpdir = tmpdir_factory.mktemp('mock-svn-stage') - expanded_archive_basedir = 'mock-svn-repo' - tmpdir.ensure(expanded_archive_basedir, dir=True) - repodir = tmpdir.join(expanded_archive_basedir) + tmpdir.ensure(spack.stage._source_path_subdir, dir=True) + repodir = tmpdir.join(spack.stage._source_path_subdir) url = 'file://' + str(repodir) # Initialize the repository diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index cccdb7b829901..e69a56596fb9d 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -4,14 +4,16 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os +import shutil import pytest -from llnl.util.filesystem import working_dir, touch +from llnl.util.filesystem import working_dir, touch, mkdirp import spack.repo import spack.config from spack.spec import Spec +from spack.stage import Stage from spack.version import ver from spack.fetch_strategy import GitFetchStrategy from spack.util.executable import which @@ -21,8 +23,11 @@ not which('git'), reason='requires git to be installed') +_mock_transport_error = 'Mock HTTP transport error' + + @pytest.fixture(params=[None, '1.8.5.2', '1.8.5.1', '1.7.10', '1.7.0']) -def git_version(request): +def git_version(request, monkeypatch): """Tests GitFetchStrategy behavior for different git versions. 
GitFetchStrategy tries to optimize using features of newer git @@ -40,13 +45,38 @@ def git_version(request): if test_git_version > real_git_version: pytest.skip("Can't test clone logic for newer version of git.") - # patch the fetch strategy to think it's using a lower git version. + # Patch the fetch strategy to think it's using a lower git version. # we use this to test what we'd need to do with older git versions # using a newer git installation. - git_version_method = GitFetchStrategy.git_version - GitFetchStrategy.git_version = test_git_version + monkeypatch.setattr(GitFetchStrategy, 'git_version', ver('1.7.1')) yield - GitFetchStrategy.git_version = git_version_method + + +@pytest.fixture +def mock_bad_git(monkeypatch): + """ + Test GitFetchStrategy behavior with a bad git command for git >= 1.7.1 + to trigger a SpackError. + """ + def bad_git(*args, **kwargs): + """Raise a SpackError with the transport message.""" + raise spack.error.SpackError(_mock_transport_error) + + # Patch the fetch strategy to think it's using a git version that + # will error out when git is called. 
+ monkeypatch.setattr(GitFetchStrategy, 'git', bad_git) + monkeypatch.setattr(GitFetchStrategy, 'git_version', ver('1.7.1')) + yield + + +def test_bad_git(tmpdir, mock_bad_git): + """Trigger a SpackError when attempt a fetch with a bad git.""" + testpath = str(tmpdir) + + with pytest.raises(spack.error.SpackError): + fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo') + with Stage(fetcher, path=testpath): + fetcher.fetch() @pytest.mark.parametrize("type_of_test", ['master', 'branch', 'tag', 'commit']) @@ -101,3 +131,41 @@ def test_fetch(type_of_test, assert os.path.isfile(file_path) assert h('HEAD') == h(t.revision) + + +@pytest.mark.parametrize("type_of_test", ['branch', 'commit']) +def test_debug_fetch(type_of_test, mock_git_repository, config): + """Fetch the repo with debug enabled.""" + # Retrieve the right test parameters + t = mock_git_repository.checks[type_of_test] + + # Construct the package under test + spec = Spec('git-test') + spec.concretize() + pkg = spack.repo.get(spec) + pkg.versions[ver('git')] = t.args + + # Fetch then ensure source path exists + with pkg.stage: + with spack.config.override('config:debug', True): + pkg.do_fetch() + assert os.path.isdir(pkg.stage.source_path) + + +def test_git_extra_fetch(tmpdir): + """Ensure a fetch after 'expanding' is effectively a no-op.""" + testpath = str(tmpdir) + + fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo') + with Stage(fetcher, path=testpath) as stage: + mkdirp(stage.source_path) + fetcher.fetch() # Use fetcher to fetch for code coverage + shutil.rmtree(stage.source_path) + + +def test_needs_stage(): + """Trigger a NoStageError when attempt a fetch without a stage.""" + with pytest.raises(spack.fetch_strategy.NoStageError, + matches=_mock_transport_error): + fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo') + fetcher.fetch() diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index 779dfd5a4b1eb..109e3d9d59d89 100644 --- 
a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -7,12 +7,14 @@ import pytest -from llnl.util.filesystem import working_dir, touch +from llnl.util.filesystem import working_dir, touch, mkdirp import spack.repo import spack.config from spack.spec import Spec +from spack.stage import Stage from spack.version import ver +from spack.fetch_strategy import HgFetchStrategy from spack.util.executable import which @@ -73,3 +75,14 @@ def test_fetch( assert os.path.isfile(file_path) assert h() == t.revision + + +def test_hg_extra_fetch(tmpdir): + """Ensure a fetch after expanding is effectively a no-op.""" + testpath = str(tmpdir) + + fetcher = HgFetchStrategy(hg='file:///not-a-real-hg-repo') + with Stage(fetcher, path=testpath) as stage: + source_path = stage.source_path + mkdirp(source_path) + fetcher.fetch() diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index a318b1dbf4656..a8697fd029a3f 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -155,7 +155,8 @@ def successful_fetch(_class): pass def successful_expand(_class): - expanded_path = os.path.join(_class.stage.path, 'expanded-dir') + expanded_path = os.path.join(_class.stage.path, + spack.stage._source_path_subdir) os.mkdir(expanded_path) with open(os.path.join(expanded_path, 'test.patch'), 'w'): pass diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py index a62ebb8599737..16b176ca10d44 100644 --- a/lib/spack/spack/test/patch.py +++ b/lib/spack/spack/test/patch.py @@ -63,9 +63,9 @@ def test_url_patch(mock_stage, filename, sha256, archive_sha256): # TODO: there is probably a better way to mock this. 
stage.mirror_path = mock_stage # don't disrupt the spack install - # fake a source path + # Fake a source path and ensure the directory exists with working_dir(stage.path): - mkdirp('spack-expanded-archive') + mkdirp(spack.stage._source_path_subdir) with working_dir(stage.source_path): # write a file to be patched diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index 3499364d5fe13..2862abc1d329d 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -6,6 +6,9 @@ """Test that the Stage class works correctly.""" import os import collections +import shutil +import tempfile +import getpass import pytest @@ -18,27 +21,108 @@ from spack.resource import Resource from spack.stage import Stage, StageComposite, ResourceStage +# The following values are used for common fetch and stage mocking fixtures: +_archive_base = 'test-files' +_archive_fn = '%s.tar.gz' % _archive_base +_extra_fn = 'extra.sh' +_hidden_fn = '.hidden' +_readme_fn = 'README.txt' -def check_expand_archive(stage, stage_name, mock_archive): +_extra_contents = '#!/bin/sh\n' +_hidden_contents = '' +_readme_contents = 'hello world!\n' + +# TODO: Replace the following with an enum once guarantee supported (or +# include enum34 for python versions < 3.4. +_include_readme = 1 +_include_hidden = 2 +_include_extra = 3 + +# Mock fetch directories are expected to appear as follows: +# +# TMPDIR/ +# _archive_fn archive_url = file:///path/to/_archive_fn +# +# Mock expanded stage directories are expected to have one of two forms, +# depending on how the tarball expands. 
Non-exploding tarballs are expected +# to have the following structure: +# +# TMPDIR/ temp stage dir +# src/ well-known stage source directory +# _readme_fn Optional test_readme (contains _readme_contents) +# _hidden_fn Optional hidden file (contains _hidden_contents) +# _archive_fn archive_url = file:///path/to/_archive_fn +# +# while exploding tarball directories are expected to be structured as follows: +# +# TMPDIR/ temp stage dir +# src/ well-known stage source directory +# archive_name/ archive dir +# _readme_fn test_readme (contains _readme_contents) +# _extra_fn test_extra file (contains _extra_contents) +# _archive_fn archive_url = file:///path/to/_archive_fn +# + + +def check_expand_archive(stage, stage_name, expected_file_list): + """ + Ensure the expanded archive directory contains the expected structure and + files as described in the module-level comments above. + """ stage_path = get_stage_path(stage, stage_name) - archive_name = 'test-files.tar.gz' - archive_dir = 'test-files' - assert archive_name in os.listdir(stage_path) - assert archive_dir in os.listdir(stage_path) + archive_dir = spack.stage._source_path_subdir - assert os.path.join(stage_path, archive_dir) == stage.source_path + stage_contents = os.listdir(stage_path) + assert _archive_fn in stage_contents + assert archive_dir in stage_contents - readme = os.path.join(stage_path, archive_dir, 'README.txt') - assert os.path.isfile(readme) - with open(readme) as file: - 'hello world!\n' == file.read() + source_path = os.path.join(stage_path, archive_dir) + assert source_path == stage.source_path + + source_contents = os.listdir(source_path) + + for _include in expected_file_list: + if _include == _include_hidden: + # The hidden file represent the HFS metadata associated with Mac + # OS X tar files so is expected to be in the same directory as + # the archive directory. 
+ assert _hidden_fn in stage_contents + + fn = os.path.join(stage_path, _hidden_fn) + contents = _hidden_contents + + elif _include == _include_readme: + # The standard README.txt file will be in the source directory if + # the tarball didn't explode; otherwise, it will be in the + # original archive subdirectory of it. + if _archive_base in source_contents: + fn = os.path.join(source_path, _archive_base, _readme_fn) + else: + fn = os.path.join(source_path, _readme_fn) + contents = _readme_contents + + elif _include == _include_extra: + assert _extra_fn in source_contents + + fn = os.path.join(source_path, _extra_fn) + contents = _extra_contents + + else: + assert False + + assert os.path.isfile(fn) + with open(fn) as _file: + _file.read() == contents def check_fetch(stage, stage_name): - archive_name = 'test-files.tar.gz' + """ + Ensure the fetch resulted in a properly placed archive file as described in + the module-level comments. + """ stage_path = get_stage_path(stage, stage_name) - assert archive_name in os.listdir(stage_path) - assert os.path.join(stage_path, archive_name) == stage.fetcher.archive_file + assert _archive_fn in os.listdir(stage_path) + assert os.path.join(stage_path, _archive_fn) == stage.fetcher.archive_file def check_destroy(stage, stage_name): @@ -76,7 +160,7 @@ def check_setup(stage, stage_name, archive): else: # Make sure the stage path is NOT a link for a non-tmp stage - assert os.path.islink(stage_path) + assert not os.path.islink(stage_path) def get_stage_path(stage, stage_name): @@ -93,71 +177,187 @@ def get_stage_path(stage, stage_name): return stage.path -@pytest.fixture() -def tmpdir_for_stage(mock_archive): - """Uses a temporary directory for staging""" - current = spack.paths.stage_path +@pytest.fixture +def no_tmp_root_stage(monkeypatch): + """ + Disable use of a temporary root for staging. + + Note that it can be important for other tests that the previous settings be + restored when the test case is over. 
+ """ + monkeypatch.setattr(spack.stage, '_tmp_root', None) + monkeypatch.setattr(spack.stage, '_use_tmp_stage', False) + yield + + +@pytest.fixture +def non_user_path_for_stage(): + """ + Use a non-user path for staging. + + Note that it can be important for other tests that the previous settings be + restored when the test case is over. + """ + current = spack.config.get('config:build_stage') + spack.config.set('config', {'build_stage': ['/var/spack/non-path']}, + scope='user') + yield + spack.config.set('config', {'build_stage': current}, scope='user') + + +@pytest.fixture +def stage_path_for_stage(): + """ + Use the basic stage_path for staging. + + Note that it can be important for other tests that the previous settings be + restored when the test case is over. + """ + current = spack.config.get('config:build_stage') + spack.config.set('config', + {'build_stage': spack.paths.stage_path}, scope='user') + yield + spack.config.set('config', {'build_stage': current}, scope='user') + + +@pytest.fixture +def tmp_path_for_stage(tmpdir): + """ + Use a built-in, temporary, test directory for staging. + + Note that it can be important for other tests that the previous settings be + restored when the test case is over. + """ + current = spack.config.get('config:build_stage') + spack.config.set('config', {'build_stage': [str(tmpdir)]}, scope='user') + yield + spack.config.set('config', {'build_stage': current}, scope='user') + + +@pytest.fixture +def tmp_root_stage(monkeypatch): + """ + Enable use of a temporary root for staging. + + Note that it can be important for other tests that the previous settings be + restored when the test case is over. + """ + monkeypatch.setattr(spack.stage, '_tmp_root', None) + monkeypatch.setattr(spack.stage, '_use_tmp_stage', True) + yield + + +@pytest.fixture +def tmpdir_for_stage(mock_stage_archive): + """ + Use the mock_stage_archive's temporary directory for staging. 
+ + Note that it can be important for other tests that the previous settings be + restored when the test case is over. + """ + archive = mock_stage_archive() + current = spack.config.get('config:build_stage') spack.config.set( 'config', - {'build_stage': [str(mock_archive.test_tmp_dir)]}, + {'build_stage': [str(archive.test_tmp_dir)]}, scope='user') yield - spack.config.set('config', {'build_stage': [current]}, scope='user') + spack.config.set('config', {'build_stage': current}, scope='user') -@pytest.fixture() -def mock_archive(tmpdir, monkeypatch): - """Creates a mock archive with the structure expected by the tests""" - # Mock up a stage area that looks like this: - # - # TMPDIR/ test_files_dir - # tmp/ test_tmp_path (where stage should be) - # test-files/ archive_dir_path - # README.txt test_readme (contains "hello world!\n") - # test-files.tar.gz archive_url = file:///path/to/this - # +@pytest.fixture +def tmp_build_stage_dir(tmpdir): + """Establish the temporary build_stage for the mock archive.""" test_tmp_path = tmpdir.join('tmp') - # set _test_tmp_path as the default test directory to use for stages. - spack.config.set( - 'config', {'build_stage': [str(test_tmp_path)]}, scope='user') - archive_dir = tmpdir.join('test-files') - archive_name = 'test-files.tar.gz' - archive = tmpdir.join(archive_name) - archive_url = 'file://' + str(archive) - test_readme = archive_dir.join('README.txt') - archive_dir.ensure(dir=True) - test_tmp_path.ensure(dir=True) - test_readme.write('hello world!\n') + # Set test_tmp_path as the default test directory to use for stages. + current = spack.config.get('config:build_stage') + spack.config.set('config', + {'build_stage': [str(test_tmp_path)]}, scope='user') - with tmpdir.as_cwd(): - tar = spack.util.executable.which('tar', required=True) - tar('czf', str(archive_name), 'test-files') + yield (tmpdir, test_tmp_path) - # Make spack use the test environment for tmp stuff. 
- monkeypatch.setattr(spack.stage, '_tmp_root', None) - monkeypatch.setattr(spack.stage, '_use_tmp_stage', True) + spack.config.set('config', {'build_stage': current}, scope='user') - Archive = collections.namedtuple( - 'Archive', ['url', 'tmpdir', 'test_tmp_dir', 'archive_dir'] - ) - yield Archive( - url=archive_url, - tmpdir=tmpdir, - test_tmp_dir=test_tmp_path, - archive_dir=archive_dir - ) +@pytest.fixture +def mock_stage_archive(tmp_build_stage_dir, tmp_root_stage, request): + """ + Create the directories and files for the staged mock archive. -@pytest.fixture() + Note that it can be important for other tests that the previous settings be + restored when the test case is over. + """ + # Mock up a stage area that looks like this: + # + # tmpdir/ test_files_dir + # tmp/ test_tmp_path (where stage should be) + # <_archive_base>/ archive_dir_path + # <_readme_fn> Optional test_readme (contains _readme_contents) + # <_extra_fn> Optional extra file (contains _extra_contents) + # <_hidden_fn> Optional hidden file (contains _hidden_contents) + # <_archive_fn> archive_url = file:///path/to/<_archive_fn> + # + def create_stage_archive(expected_file_list=[_include_readme]): + tmpdir, test_tmp_path = tmp_build_stage_dir + test_tmp_path.ensure(dir=True) + + # Create the archive directory and associated file + archive_dir = tmpdir.join(_archive_base) + archive = tmpdir.join(_archive_fn) + archive_url = 'file://' + str(archive) + archive_dir.ensure(dir=True) + + # Create the optional files as requested and make sure expanded + # archive peers are included. + tar_args = ['czf', str(_archive_fn), _archive_base] + for _include in expected_file_list: + if _include == _include_hidden: + # The hidden file case stands in for the way Mac OS X tar files + # represent HFS metadata. Locate in the same directory as the + # archive file. 
+ tar_args.append(_hidden_fn) + fn, contents = (tmpdir.join(_hidden_fn), _hidden_contents) + + elif _include == _include_readme: + # The usual README.txt file is contained in the archive dir. + fn, contents = (archive_dir.join(_readme_fn), _readme_contents) + + elif _include == _include_extra: + # The extra file stands in for exploding tar files so needs + # to be in the same directory as the archive file. + tar_args.append(_extra_fn) + fn, contents = (tmpdir.join(_extra_fn), _extra_contents) + else: + break + + fn.write(contents) + + # Create the archive file + with tmpdir.as_cwd(): + tar = spack.util.executable.which('tar', required=True) + tar(*tar_args) + + Archive = collections.namedtuple( + 'Archive', ['url', 'tmpdir', 'test_tmp_dir', 'archive_dir'] + ) + return Archive(url=archive_url, tmpdir=tmpdir, + test_tmp_dir=test_tmp_path, archive_dir=archive_dir) + + return create_stage_archive + + +@pytest.fixture def mock_noexpand_resource(tmpdir): + """Set up a non-expandable resource in the tmpdir prior to staging.""" test_resource = tmpdir.join('resource-no-expand.sh') test_resource.write("an example resource") return str(test_resource) -@pytest.fixture() +@pytest.fixture def mock_expand_resource(tmpdir): + """Sets up an expandable resource in tmpdir prior to staging.""" resource_dir = tmpdir.join('resource-expand') archive_name = 'resource.tar.gz' archive = tmpdir.join(archive_name) @@ -165,10 +365,10 @@ def mock_expand_resource(tmpdir): test_file = resource_dir.join('resource-file.txt') resource_dir.ensure(dir=True) test_file.write('test content\n') - current = tmpdir.chdir() - tar = spack.util.executable.which('tar', required=True) - tar('czf', str(archive_name), 'resource-expand') - current.chdir() + + with tmpdir.as_cwd(): + tar = spack.util.executable.which('tar', required=True) + tar('czf', str(archive_name), 'resource-expand') MockResource = collections.namedtuple( 'MockResource', ['url', 'files']) @@ -176,11 +376,13 @@ def mock_expand_resource(tmpdir): 
return MockResource(archive_url, ['resource-file.txt']) -@pytest.fixture() +@pytest.fixture def composite_stage_with_expanding_resource( - mock_archive, mock_expand_resource): + mock_stage_archive, mock_expand_resource): + """Sets up a composite for expanding resources prior to staging.""" composite_stage = StageComposite() - root_stage = Stage(mock_archive.url) + archive = mock_stage_archive() + root_stage = Stage(archive.url) composite_stage.append(root_stage) test_resource_fetcher = spack.fetch_strategy.from_kwargs( @@ -195,7 +397,7 @@ def composite_stage_with_expanding_resource( return composite_stage, root_stage, resource_stage -@pytest.fixture() +@pytest.fixture def failing_search_fn(): """Returns a search function that fails! Always!""" def _mock(): @@ -203,7 +405,7 @@ def _mock(): return _mock -@pytest.fixture() +@pytest.fixture def failing_fetch_strategy(): """Returns a fetch strategy that fails.""" class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy): @@ -215,7 +417,7 @@ def fetch(self): return FailingFetchStrategy() -@pytest.fixture() +@pytest.fixture def search_fn(): """Returns a search function that always succeeds.""" class _Mock(object): @@ -234,28 +436,32 @@ class TestStage(object): stage_name = 'spack-test-stage' @pytest.mark.usefixtures('tmpdir_for_stage') - def test_setup_and_destroy_name_with_tmp(self, mock_archive): - with Stage(mock_archive.url, name=self.stage_name) as stage: - check_setup(stage, self.stage_name, mock_archive) + def test_setup_and_destroy_name_with_tmp(self, mock_stage_archive): + archive = mock_stage_archive() + with Stage(archive.url, name=self.stage_name) as stage: + check_setup(stage, self.stage_name, archive) check_destroy(stage, self.stage_name) - def test_setup_and_destroy_name_without_tmp(self, mock_archive): - with Stage(mock_archive.url, name=self.stage_name) as stage: - check_setup(stage, self.stage_name, mock_archive) + def test_setup_and_destroy_name_without_tmp(self, mock_stage_archive): + archive = 
mock_stage_archive() + with Stage(archive.url, name=self.stage_name) as stage: + check_setup(stage, self.stage_name, archive) check_destroy(stage, self.stage_name) @pytest.mark.usefixtures('tmpdir_for_stage') - def test_setup_and_destroy_no_name_with_tmp(self, mock_archive): - with Stage(mock_archive.url) as stage: - check_setup(stage, None, mock_archive) + def test_setup_and_destroy_no_name_with_tmp(self, mock_stage_archive): + archive = mock_stage_archive() + with Stage(archive.url) as stage: + check_setup(stage, None, archive) check_destroy(stage, None) @pytest.mark.disable_clean_stage_check @pytest.mark.usefixtures('tmpdir_for_stage') def test_composite_stage_with_noexpand_resource( - self, mock_archive, mock_noexpand_resource): + self, mock_stage_archive, mock_noexpand_resource): + archive = mock_stage_archive() composite_stage = StageComposite() - root_stage = Stage(mock_archive.url) + root_stage = Stage(archive.url) composite_stage.append(root_stage) resource_dst_name = 'resource-dst-name.sh' @@ -276,7 +482,7 @@ def test_composite_stage_with_noexpand_resource( @pytest.mark.disable_clean_stage_check @pytest.mark.usefixtures('tmpdir_for_stage') def test_composite_stage_with_expand_resource( - self, mock_archive, mock_expand_resource, + self, mock_stage_archive, mock_expand_resource, composite_stage_with_expanding_resource): composite_stage, root_stage, resource_stage = ( @@ -291,22 +497,26 @@ def test_composite_stage_with_expand_resource( root_stage.source_path, 'resource-dir', fname) assert os.path.exists(file_path) - def test_setup_and_destroy_no_name_without_tmp(self, mock_archive): - with Stage(mock_archive.url) as stage: - check_setup(stage, None, mock_archive) + def test_setup_and_destroy_no_name_without_tmp(self, mock_stage_archive): + archive = mock_stage_archive() + with Stage(archive.url) as stage: + check_setup(stage, None, archive) check_destroy(stage, None) - def test_fetch(self, mock_archive): - with Stage(mock_archive.url, name=self.stage_name) 
as stage: - stage.fetch() - check_setup(stage, self.stage_name, mock_archive) - check_fetch(stage, self.stage_name) - check_destroy(stage, self.stage_name) + @pytest.mark.parametrize('debug', [False, True]) + def test_fetch(self, mock_stage_archive, debug): + archive = mock_stage_archive() + with spack.config.override('config:debug', debug): + with Stage(archive.url, name=self.stage_name) as stage: + stage.fetch() + check_setup(stage, self.stage_name, archive) + check_fetch(stage, self.stage_name) + check_destroy(stage, self.stage_name) def test_no_search_if_default_succeeds( - self, mock_archive, failing_search_fn): - stage = Stage(mock_archive.url, - name=self.stage_name, + self, mock_stage_archive, failing_search_fn): + archive = mock_stage_archive() + stage = Stage(archive.url, name=self.stage_name, search_fn=failing_search_fn) with stage: stage.fetch() @@ -336,22 +546,49 @@ def test_search_if_default_fails(self, failing_fetch_strategy, search_fn): check_destroy(stage, self.stage_name) assert search_fn.performed_search - def test_expand_archive(self, mock_archive): - with Stage(mock_archive.url, name=self.stage_name) as stage: + def test_ensure_one_stage_entry(self, mock_stage_archive): + archive = mock_stage_archive() + with Stage(archive.url, name=self.stage_name) as stage: stage.fetch() - check_setup(stage, self.stage_name, mock_archive) + stage_path = get_stage_path(stage, self.stage_name) + spack.fetch_strategy._ensure_one_stage_entry(stage_path) + check_destroy(stage, self.stage_name) + + @pytest.mark.parametrize("expected_file_list", [ + [], + [_include_readme], + [_include_extra, _include_readme], + [_include_hidden, _include_readme]]) + def test_expand_archive(self, expected_file_list, mock_stage_archive): + archive = mock_stage_archive(expected_file_list) + with Stage(archive.url, name=self.stage_name) as stage: + stage.fetch() + check_setup(stage, self.stage_name, archive) check_fetch(stage, self.stage_name) stage.expand_archive() - 
check_expand_archive(stage, self.stage_name, mock_archive) + check_expand_archive(stage, self.stage_name, expected_file_list) check_destroy(stage, self.stage_name) - def test_restage(self, mock_archive): - with Stage(mock_archive.url, name=self.stage_name) as stage: + def test_expand_archive_extra_expand(self, mock_stage_archive): + """Test expand with an extra expand after expand (i.e., no-op).""" + archive = mock_stage_archive() + with Stage(archive.url, name=self.stage_name) as stage: + stage.fetch() + check_setup(stage, self.stage_name, archive) + check_fetch(stage, self.stage_name) + stage.expand_archive() + stage.fetcher.expand() + check_expand_archive(stage, self.stage_name, [_include_readme]) + check_destroy(stage, self.stage_name) + + def test_restage(self, mock_stage_archive): + archive = mock_stage_archive() + with Stage(archive.url, name=self.stage_name) as stage: stage.fetch() stage.expand_archive() with working_dir(stage.source_path): - check_expand_archive(stage, self.stage_name, mock_archive) + check_expand_archive(stage, self.stage_name, [_include_readme]) # Try to make a file in the old archive dir with open('foobar', 'w') as file: @@ -365,26 +602,29 @@ def test_restage(self, mock_archive): assert 'foobar' not in os.listdir(stage.source_path) check_destroy(stage, self.stage_name) - def test_no_keep_without_exceptions(self, mock_archive): - stage = Stage(mock_archive.url, name=self.stage_name, keep=False) + def test_no_keep_without_exceptions(self, mock_stage_archive): + archive = mock_stage_archive() + stage = Stage(archive.url, name=self.stage_name, keep=False) with stage: pass check_destroy(stage, self.stage_name) @pytest.mark.disable_clean_stage_check - def test_keep_without_exceptions(self, mock_archive): - stage = Stage(mock_archive.url, name=self.stage_name, keep=True) + def test_keep_without_exceptions(self, mock_stage_archive): + archive = mock_stage_archive() + stage = Stage(archive.url, name=self.stage_name, keep=True) with stage: pass 
path = get_stage_path(stage, self.stage_name) assert os.path.isdir(path) @pytest.mark.disable_clean_stage_check - def test_no_keep_with_exceptions(self, mock_archive): + def test_no_keep_with_exceptions(self, mock_stage_archive): class ThisMustFailHere(Exception): pass - stage = Stage(mock_archive.url, name=self.stage_name, keep=False) + archive = mock_stage_archive() + stage = Stage(archive.url, name=self.stage_name, keep=False) try: with stage: raise ThisMustFailHere() @@ -394,11 +634,12 @@ class ThisMustFailHere(Exception): assert os.path.isdir(path) @pytest.mark.disable_clean_stage_check - def test_keep_exceptions(self, mock_archive): + def test_keep_exceptions(self, mock_stage_archive): class ThisMustFailHere(Exception): pass - stage = Stage(mock_archive.url, name=self.stage_name, keep=True) + archive = mock_stage_archive() + stage = Stage(archive.url, name=self.stage_name, keep=True) try: with stage: raise ThisMustFailHere() @@ -406,3 +647,55 @@ class ThisMustFailHere(Exception): except ThisMustFailHere: path = get_stage_path(stage, self.stage_name) assert os.path.isdir(path) + + def test_source_path_available(self, mock_stage_archive): + """Ensure source path available but does not exist on instantiation.""" + archive = mock_stage_archive() + stage = Stage(archive.url, name=self.stage_name) + + source_path = stage.source_path + assert source_path + assert source_path.endswith(spack.stage._source_path_subdir) + assert not os.path.exists(source_path) + + def test_first_accessible_path_error(self): + """Test _first_accessible_path handling of an OSError.""" + with tempfile.NamedTemporaryFile() as _file: + assert spack.stage._first_accessible_path([_file.name]) is None + + def test_get_tmp_root_no_use(self, no_tmp_root_stage): + """Ensure not using tmp root results in no path.""" + assert spack.stage.get_tmp_root() is None + + def test_get_tmp_root_non_user_path(self, tmp_root_stage, + non_user_path_for_stage): + """Ensure build_stage of tmp root with non-user 
path includes user.""" + path = spack.stage.get_tmp_root() + assert path.endswith(os.path.join(getpass.getuser(), 'spack-stage')) + + def test_get_tmp_root_use(self, tmp_root_stage, tmp_path_for_stage): + """Ensure build_stage of tmp root has the right ancestors.""" + path = spack.stage.get_tmp_root() + shutil.rmtree(path) + assert path.rfind('test_get_tmp_root_use') > 0 + assert path.endswith('spack-stage') + + def test_get_tmp_root_stage_path(self, tmp_root_stage, + stage_path_for_stage): + """ + Ensure build_stage of tmp root with stage_path means use local path. + """ + assert spack.stage.get_tmp_root() is None + assert not spack.stage._use_tmp_stage + + def test_stage_constructor_no_fetcher(self): + """Ensure Stage constructor with no URL or fetch strategy fails.""" + with pytest.raises(ValueError): + with Stage(None): + pass + + def test_stage_constructor_with_path(self, tmpdir): + """Ensure Stage constructor with a path uses it.""" + testpath = str(tmpdir) + with Stage('file:///does-not-exist', path=testpath) as stage: + assert stage.path == testpath diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 1542615326e70..a915a82874abe 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -7,12 +7,14 @@ import pytest -from llnl.util.filesystem import touch, working_dir +from llnl.util.filesystem import touch, working_dir, mkdirp import spack.repo import spack.config from spack.spec import Spec +from spack.stage import Stage import spack.repo import spack.config from spack.spec import Spec +from spack.stage import Stage from spack.version import ver +from spack.fetch_strategy import SvnFetchStrategy from spack.util.executable import which @@ -73,3 +75,19 @@ def test_fetch( assert os.path.isfile(file_path) assert h() == t.revision + + +def test_svn_extra_fetch(tmpdir): + """Ensure a fetch after downloading is effectively a no-op.""" + testpath = str(tmpdir) + + fetcher = SvnFetchStrategy(svn='file:///not-a-real-svn-repo') + assert fetcher is not None + + with Stage(fetcher,
path=testpath) as stage: + assert stage is not None + + source_path = stage.source_path + mkdirp(source_path) + + fetcher.fetch() diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py index 4b794eb1c2f04..cc9560350ab33 100644 --- a/lib/spack/spack/test/url_fetch.py +++ b/lib/spack/spack/test/url_fetch.py @@ -10,8 +10,10 @@ import spack.repo import spack.config +from spack.fetch_strategy import FailedDownloadError from spack.fetch_strategy import from_list_url, URLFetchStrategy from spack.spec import Spec +from spack.stage import Stage from spack.version import ver import spack.util.crypto as crypto @@ -21,6 +23,27 @@ def checksum_type(request): return request.param +def test_urlfetchstrategy_sans_url(): + """Ensure constructor with no URL fails.""" + with pytest.raises(ValueError): + with URLFetchStrategy(None): + pass + + +def test_urlfetchstrategy_bad_url(tmpdir): + """Ensure fetch with bad URL fails as expected.""" + testpath = str(tmpdir) + + with pytest.raises(FailedDownloadError): + fetcher = URLFetchStrategy(url='file:///does-not-exist') + assert fetcher is not None + + with Stage(fetcher, path=testpath) as stage: + assert stage is not None + assert fetcher.archive_file is None + fetcher.fetch() + + @pytest.mark.parametrize('secure', [True, False]) def test_fetch( mock_archive, @@ -135,3 +158,15 @@ def test_hash_detection(checksum_type): def test_unknown_hash(checksum_type): with pytest.raises(ValueError): crypto.Checker('a') + + +def test_url_extra_fetch(tmpdir, mock_archive): + """Ensure a fetch after downloading is effectively a no-op.""" + testpath = str(tmpdir) + + fetcher = URLFetchStrategy(mock_archive.url) + with Stage(fetcher, path=testpath) as stage: + assert fetcher.archive_file is None + stage.fetch() + assert fetcher.archive_file is not None + fetcher.fetch() diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests index 706720c0335fc..cd5c057998028 100755 --- a/share/spack/qa/run-unit-tests +++ 
b/share/spack/qa/run-unit-tests @@ -31,7 +31,7 @@ ${coverage_run} bin/spack -h ${coverage_run} bin/spack help -a # Profile and print top 20 lines for a simple call to spack spec -if [[ $TRAVIS_PYTHON_VERSION != 2.6 ]]; then +if [[ $TRAVIS_PYTHON_VERSION != 2* ]]; then ${coverage_run} bin/spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170 fi