Commit

Merge pull request #1015 from LLNL/features/faster-virtuals
Faster virtuals and concretization
tgamblin committed Aug 9, 2016
2 parents fb9f6fe + 9d4a36a commit a095fd5
Showing 25 changed files with 1,348 additions and 469 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -1,5 +1,7 @@
/var/spack/stage
/var/spack/cache
/var/spack/repos/*/index.yaml
/var/spack/repos/*/lock
*.pyc
/opt
*~
89 changes: 79 additions & 10 deletions lib/spack/llnl/util/lock.py
@@ -28,6 +28,9 @@
import time
import socket

__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
'LockError']

# Default timeout in seconds, after which locks will raise exceptions.
_default_timeout = 60

@@ -36,13 +39,20 @@


class Lock(object):
def __init__(self,file_path):
"""This is an implementation of a filesystem lock using Python's lockf.
In Python, `lockf` actually calls `fcntl`, so this should work with any
filesystem implementation that supports locking through the fcntl calls.
This includes distributed filesystems like Lustre (when flock is enabled)
and recent NFS versions.
"""
def __init__(self, file_path):
self._file_path = file_path
self._fd = None
self._reads = 0
self._writes = 0


def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
@@ -63,7 +73,9 @@ def _lock(self, op, timeout):

fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
os.write(
self._fd,
"pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return

except IOError as error:
@@ -75,19 +87,17 @@ def _lock(self, op, timeout):

raise LockError("Timed out waiting for lock.")


def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
Releases the lock regardless of mode. Note that read locks may
be masquerading as write locks, but this removes either.
"""
fcntl.lockf(self._fd,fcntl.LOCK_UN)
fcntl.lockf(self._fd, fcntl.LOCK_UN)
os.close(self._fd)
self._fd = None


def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
@@ -107,7 +117,6 @@ def acquire_read(self, timeout=_default_timeout):
self._reads += 1
return False


def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
@@ -127,7 +136,6 @@ def acquire_write(self, timeout=_default_timeout):
self._writes += 1
return False


def release_read(self):
"""Releases a read lock.
@@ -148,7 +156,6 @@ def release_read(self):
self._reads -= 1
return False


def release_write(self):
"""Releases a write lock.
@@ -170,6 +177,68 @@ def release_write(self):
return False


class LockTransaction(object):
"""Simple nested transaction context manager that uses a file lock.
This class can trigger actions when the lock is acquired for the
first time and released for the last.
If the acquire_fn returns a value, it is used as the return value for
__enter__, allowing it to be passed as the `as` argument of a `with`
statement.
If acquire_fn returns a context manager, *its* `__enter__` function will be
called in `__enter__` after acquire_fn, and its `__exit__` function will be
called before `release_fn` in `__exit__`, allowing you to nest a context
manager to be used along with the lock.
Timeout for lock is customizable.
"""

def __init__(self, lock, acquire_fn=None, release_fn=None,
timeout=_default_timeout):
self._lock = lock
self._timeout = timeout
self._acquire_fn = acquire_fn
self._release_fn = release_fn
self._as = None

def __enter__(self):
if self._enter() and self._acquire_fn:
self._as = self._acquire_fn()
if hasattr(self._as, '__enter__'):
return self._as.__enter__()
else:
return self._as

def __exit__(self, type, value, traceback):
suppress = False
if self._exit():
if self._as and hasattr(self._as, '__exit__'):
if self._as.__exit__(type, value, traceback):
suppress = True
if self._release_fn:
if self._release_fn(type, value, traceback):
suppress = True
return suppress


class ReadTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_read(self._timeout)

def _exit(self):
return self._lock.release_read()


class WriteTransaction(LockTransaction):
def _enter(self):
return self._lock.acquire_write(self._timeout)

def _exit(self):
return self._lock.release_write()


class LockError(Exception):
"""Raised when an attempt to acquire a lock times out."""
pass
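
A minimal usage sketch of the new Lock class, assuming the lock file's directory exists and is writable; the path and the guarded work below are hypothetical, not part of this commit:

from llnl.util.lock import Lock

lock = Lock('/tmp/example.lock')    # hypothetical lock file path

lock.acquire_write(timeout=5)       # exclusive; raises LockError after 5 seconds
try:
    pass                            # ... modify the shared resource ...
finally:
    lock.release_write()            # returns True once the last holder releases

lock.acquire_read()                 # shared, recursive; default 60-second timeout
try:
    pass                            # ... read the shared resource ...
finally:
    lock.release_read()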
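
A companion sketch of the new transaction wrappers, reusing the lock from the sketch above; reload_index and write_index are hypothetical callbacks standing in for whatever the caller wants to run on the outermost acquire and the final release:

from llnl.util.lock import ReadTransaction, WriteTransaction

def reload_index():
    pass    # runs once, when the outermost lock is first acquired

def write_index(type, value, traceback):
    pass    # runs with the exception info, just before the final release

with ReadTransaction(lock, acquire_fn=reload_index):
    pass    # shared access; nested transactions reuse the already-held lock

with WriteTransaction(lock, acquire_fn=reload_index, release_fn=write_index):
    pass    # exclusive access; write_index fires only on the outermost exit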
31 changes: 20 additions & 11 deletions lib/spack/spack/__init__.py
@@ -1,3 +1,4 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -50,8 +51,15 @@
share_path = join_path(spack_root, "share", "spack")
cache_path = join_path(var_path, "cache")

# User configuration location
user_config_path = os.path.expanduser('~/.spack')

import spack.fetch_strategy
cache = spack.fetch_strategy.FsCache(cache_path)
fetch_cache = spack.fetch_strategy.FsCache(cache_path)

from spack.file_cache import FileCache
user_cache_path = join_path(user_config_path, 'cache')
user_cache = FileCache(user_cache_path)

prefix = spack_root
opt_path = join_path(prefix, "opt")
@@ -140,7 +148,7 @@
_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
for path in _tmp_candidates:
# don't add a second username if it's already unique by user.
if not _tmp_user in path:
if _tmp_user not in path:
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
else:
tmp_dirs.append(join_path(path, 'spack-stage'))
@@ -172,12 +180,13 @@
# Spack internal code should call 'import spack' and accesses other
# variables (spack.repo, paths, etc.) directly.
#
# TODO: maybe this should be separated out and should go in build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in packages
# should live. This file is overloaded for spack core vs. for packages.
# TODO: maybe this should be separated out to build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in
# packages should live. This file is overloaded for spack core vs.
# for packages.
#
__all__ = ['Package', 'StagedPackage', 'CMakePackage', \
'Version', 'when', 'ver', 'alldeps', 'nolink']
__all__ = ['Package', 'StagedPackage', 'CMakePackage',
'Version', 'when', 'ver', 'alldeps', 'nolink']
from spack.package import Package, ExtensionConflictError
from spack.package import StagedPackage, CMakePackage
from spack.version import Version, ver
@@ -197,8 +206,8 @@
__all__ += spack.util.executable.__all__

from spack.package import \
install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
InstallError, ExternalPackageError
install_dependency_symlinks, flatten_dependencies, \
DependencyConflictError, InstallError, ExternalPackageError
__all__ += [
'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
'InstallError', 'ExternalPackageError']
'install_dependency_symlinks', 'flatten_dependencies',
'DependencyConflictError', 'InstallError', 'ExternalPackageError']
7 changes: 7 additions & 0 deletions lib/spack/spack/architecture.py
@@ -383,6 +383,13 @@ def __str__(self):
def __contains__(self, string):
return string in str(self)

# TODO: make this unnecessary: don't include an empty arch on *every* spec.
def __nonzero__(self):
return (self.platform is not None or
self.platform_os is not None or
self.target is not None)
__bool__ = __nonzero__

def _cmp_key(self):
if isinstance(self.platform, Platform):
platform = self.platform.name
14 changes: 10 additions & 4 deletions lib/spack/spack/cmd/purge.py
@@ -33,20 +33,26 @@ def setup_parser(subparser):
'-s', '--stage', action='store_true', default=True,
help="Remove all temporary build stages (default).")
subparser.add_argument(
'-c', '--cache', action='store_true', help="Remove cached downloads.")
'-d', '--downloads', action='store_true',
help="Remove cached downloads.")
subparser.add_argument(
'-u', '--user-cache', action='store_true',
help="Remove caches in user home directory. Includes virtual indices.")
subparser.add_argument(
'-a', '--all', action='store_true',
help="Remove all of the above.")


def purge(parser, args):
# Special case: no flags.
if not any((args.stage, args.cache, args.all)):
if not any((args.stage, args.downloads, args.user_cache, args.all)):
stage.purge()
return

# handle other flags with fall through.
if args.stage or args.all:
stage.purge()
if args.cache or args.all:
spack.cache.destroy()
if args.downloads or args.all:
spack.fetch_cache.destroy()
if args.user_cache or args.all:
spack.user_cache.destroy()
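
With the renamed flags, typical invocations look roughly like this (illustrative commands, not output captured from this commit):

spack purge                 # clear temporary build stages (the default)
spack purge --downloads     # also remove the download cache (spack.fetch_cache)
spack purge --user-cache    # remove the per-user cache under ~/.spack, including virtual indices
spack purge --all           # everything above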
19 changes: 10 additions & 9 deletions lib/spack/spack/cmd/test.py
@@ -23,28 +23,28 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
from pprint import pprint

from llnl.util.filesystem import join_path, mkdirp
from llnl.util.tty.colify import colify
from llnl.util.lang import list_modules

import spack
import spack.test
from spack.fetch_strategy import FetchError

description ="Run unit tests"
description = "Run unit tests"


def setup_parser(subparser):
subparser.add_argument(
'names', nargs='*', help="Names of tests to run.")
subparser.add_argument(
'-l', '--list', action='store_true', dest='list', help="Show available tests")
'-l', '--list', action='store_true', dest='list',
help="Show available tests")
subparser.add_argument(
'--createXmlOutput', action='store_true', dest='createXmlOutput',
'--createXmlOutput', action='store_true', dest='createXmlOutput',
help="Create JUnit XML from test results")
subparser.add_argument(
'--xmlOutputDir', dest='xmlOutputDir',
'--xmlOutputDir', dest='xmlOutputDir',
help="Nose creates XML files in this directory")
subparser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
@@ -62,13 +62,14 @@ def fetcher(self, targetPath, digest):
class MockCacheFetcher(object):
def set_stage(self, stage):
pass

def fetch(self):
raise FetchError("Mock cache always fails for tests")

def __str__(self):
return "[mock fetcher]"


def test(parser, args):
if args.list:
print "Available tests:"
@@ -82,8 +83,8 @@ def test(parser, args):
outputDir = join_path(os.getcwd(), "test-output")
else:
outputDir = os.path.abspath(args.xmlOutputDir)

if not os.path.exists(outputDir):
mkdirp(outputDir)
spack.cache = MockCache()
spack.fetch_cache = MockCache()
spack.test.run(args.names, outputDir, args.verbose)
3 changes: 2 additions & 1 deletion lib/spack/spack/cmd/uninstall.py
@@ -184,7 +184,8 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list))

if has_error:
tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well') # NOQA: ignore=E501
tty.die('You can use spack uninstall --dependents '
'to uninstall these dependencies as well')

if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
2 changes: 1 addition & 1 deletion lib/spack/spack/config.py
@@ -525,7 +525,7 @@ def clear(self):
ConfigScope('site', os.path.join(spack.etc_path, 'spack'))

"""User configuration can override both spack defaults and site config."""
ConfigScope('user', os.path.expanduser('~/.spack'))
ConfigScope('user', spack.user_config_path)


def highest_precedence_scope():