dependency types #378

Merged: 17 commits, Jul 15, 2016
25 changes: 25 additions & 0 deletions lib/spack/docs/packaging_guide.rst
@@ -1286,6 +1286,31 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.

Additionally, dependencies may be specified for specific use cases:

.. code-block:: python

depends_on("cmake", type="build")
depends_on("libelf", type=("build", "link"))
depends_on("python", type="run")

The dependency types are:

* **"build"**: the dependency is made available while the package is being
  built. It will be added to ``PATH``, the compiler include paths, and
  ``PYTHONPATH``. Packages that depend on this one do not inherit these
  modifications (building package X does not require package Y's build
  dependencies).
* **"link"**: the dependency is linked against by the package. It will be
  added to the package's ``rpath``.
* **"run"**: the dependency is used by the package at runtime. It will be
  added to ``PATH`` and ``PYTHONPATH``.

If not specified, ``type`` defaults to ``("build", "link")``, which is the
common case for compiled packages. Two aliases are also available: ``alldeps``
for all dependency types, and ``nolink`` (``("build", "run")``) for
dependencies that are not linked against, e.g. Python or Lua module loading.
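
For example, a package that extends Python might declare its dependencies as
follows (a hypothetical sketch; ``PyFoo`` and the chosen dependencies are
illustrative only, not part of this change):

.. code-block:: python

   class PyFoo(Package):
       # Python is imported at build time and at runtime, but never linked
       # against, so the nolink alias (("build", "run")) fits.
       depends_on("python", type=nolink)
       # Needed only while building.
       depends_on("cmake", type="build")
       # No type given: defaults to ("build", "link").
       depends_on("libelf")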

.. _setup-dependent-environment:

``setup_dependent_environment()``
3 changes: 2 additions & 1 deletion lib/spack/spack/__init__.py
@@ -177,10 +177,11 @@
# should live. This file is overloaded for spack core vs. for packages.
#
__all__ = ['Package', 'StagedPackage', 'CMakePackage', \
'Version', 'when', 'ver']
'Version', 'when', 'ver', 'alldeps', 'nolink']
from spack.package import Package, ExtensionConflictError
from spack.package import StagedPackage, CMakePackage
from spack.version import Version, ver
from spack.spec import DependencySpec, alldeps, nolink
from spack.multimethod import when

import llnl.util.filesystem
14 changes: 6 additions & 8 deletions lib/spack/spack/build_environment.py
@@ -254,7 +254,8 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set_path(SPACK_ENV_PATH, env_paths)

# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
dep_prefixes = [d.prefix
for d in pkg.spec.traverse(root=False, deptype='build')]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
@@ -337,10 +338,6 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')

# TODO: shouldn't really use "which" here. Consider adding notion
# TODO: of build dependencies, as opposed to link dependencies.
# TODO: Currently, everything is a link dependency, but tools like
# TODO: this shouldn't be.
m.cmake = Executable('cmake')
m.ctest = Executable('ctest')

@@ -388,9 +385,10 @@ def set_module_variables_for_package(pkg, module):
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
deps = pkg.spec.dependencies(deptype='link')
rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
@@ -449,7 +447,7 @@ def setup_package(pkg, dirty=False):
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
for dspec in pkg.spec.traverse(order='post', root=False):
for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,
2 changes: 1 addition & 1 deletion lib/spack/spack/cmd/fetch.py
@@ -51,7 +51,7 @@ def fetch(parser, args):
for spec in specs:
if args.missing or args.dependencies:
to_fetch = set()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue
14 changes: 8 additions & 6 deletions lib/spack/spack/cmd/info.py
@@ -81,12 +81,14 @@ def print_text_info(pkg):

print " " + fmt % (name, default, desc)

print
print "Dependencies:"
if pkg.dependencies:
colify(pkg.dependencies, indent=4)
else:
print " None"
for deptype in ('build', 'link', 'run'):
print
print "%s Dependencies:" % deptype.capitalize()
deps = pkg.dependencies(deptype)
if deps:
colify(deps, indent=4)
else:
print " None"

print
print "Virtual packages: "
2 changes: 1 addition & 1 deletion lib/spack/spack/cmd/mirror.py
@@ -179,7 +179,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
for s in spec.traverse():
for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)

2 changes: 1 addition & 1 deletion lib/spack/spack/cmd/module.py
@@ -87,7 +87,7 @@ def _find_modules(spec, modules_list):
return

if flags.recurse_dependencies:
for dep in spec.dependencies.values():
for dep in spec.dependencies():
_find_modules(dep, modules_list)

mod = module_types[mtype](spec)
14 changes: 9 additions & 5 deletions lib/spack/spack/cmd/package-list.py
@@ -80,11 +80,15 @@ def print_rst_package_list():
if pkg.versions:
print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
if pkg.dependencies:
print "Dependencies"
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in pkg.dependencies)
print

for deptype in ('build', 'link', 'run'):
deps = pkg.dependencies(deptype)
if deps:
print "%s Dependencies" % deptype.capitalize()
print " " + ", ".join("`%s`_" % d if d != "mpi" else d
for d in deps)
print

print "Description:"
print pkg.format_doc(indent=2)
print
7 changes: 6 additions & 1 deletion lib/spack/spack/cmd/test-install.py
@@ -133,7 +133,12 @@ def fetch_log(path):


def failed_dependencies(spec):
return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
def get_deps(deptype):
return set(item for item in spec.dependencies(deptype)
if not spack.repo.get(item).installed)
link_deps = get_deps('link')
run_deps = get_deps('run')
return link_deps.union(run_deps)


def get_top_spec_or_die(args):
10 changes: 6 additions & 4 deletions lib/spack/spack/concretize.py
@@ -103,7 +103,7 @@ def cmp_externals(a, b):
usable.sort(cmp=cmp_externals)
return usable


# XXX(deptypes): Look here.
Member: is this supposed to get fixed? not sure what it's for

def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@@ -394,16 +394,18 @@ def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
dagiter = chain(spec.traverse(direction='parents', root=False),
spec.traverse(direction='children', root=False))
deptype = ('build', 'link')
dagiter = chain(
spec.traverse(direction='parents', deptype=deptype, root=False),
spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
return relative
visited.add(id(relative))

# Then search all other relatives in the DAG *except* spec
for relative in spec.root.traverse():
for relative in spec.root.traverse(deptypes=spack.alldeps):
if relative is spec: continue
if id(relative) in visited: continue
if condition(relative):
22 changes: 15 additions & 7 deletions lib/spack/spack/database.py
@@ -215,9 +215,14 @@ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
# Add dependencies from other records in the install DB to
# form a full spec.
if 'dependencies' in spec_dict[spec.name]:
for dep_hash in spec_dict[spec.name]['dependencies'].values():
for dep in spec_dict[spec.name]['dependencies'].values():
if type(dep) == tuple:
dep_hash, deptypes = dep
else:
dep_hash = dep
deptypes = spack.alldeps
child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
spec._add_dependency(child)
spec._add_dependency(child, deptypes)

# Specs from the database need to be marked concrete because
# they represent actual installations.
@@ -334,7 +339,10 @@ def _check_ref_counts(self):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
for dep in rec.spec.dependencies.values():
# XXX(deptype): This checks all dependencies, but build
# dependencies might be able to be dropped in the
# future.
@tgamblin (Member), Apr 28, 2016: Is this consistent with the ref counting? It looks like you only increment for link here, but this checks for ('build', 'link'), right?

Contributor Author: Dependency queries get all by default. Only traversal defaults to ('build', 'link').

for dep in rec.spec.dependencies():
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
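
A small sketch of the convention described in the thread above (assuming the
query/traversal API added in this PR; 'libdwarf' is only an example spec):

    import spack
    from spack.spec import Spec

    spec = Spec('libdwarf')
    spec.concretize()

    spec.dependencies()        # dependency queries return all types by default
    spec.dependencies('link')  # restrict to link-type dependencies
    list(spec.traverse())      # traversal defaults to ('build', 'link') edges
    list(spec.traverse(deptype_query=spack.alldeps))  # opt back in to everything
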
@@ -406,7 +414,7 @@ def _add(self, spec, path, directory_layout=None, explicit=False):
else:
self._data[key] = InstallRecord(spec, path, True,
explicit=explicit)
for dep in spec.dependencies.values():
for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep, directory_layout)

def _increment_ref_count(self, spec, directory_layout=None):
@@ -421,7 +429,7 @@ def _increment_ref_count(self, spec, directory_layout=None):

self._data[key] = InstallRecord(spec.copy(), path, installed)

for dep in spec.dependencies.values():
for dep in spec.dependencies('link'):
self._increment_ref_count(dep)

self._data[key].ref_count += 1
@@ -466,7 +474,7 @@ def _decrement_ref_count(self, spec):

if rec.ref_count == 0 and not rec.installed:
del self._data[key]
for dep in spec.dependencies.values():
for dep in spec.dependencies('link'):
self._decrement_ref_count(dep)

def _remove(self, spec):
@@ -480,7 +488,7 @@ def _remove(self, spec):
return rec.spec

del self._data[key]
for dep in rec.spec.dependencies.values():
for dep in rec.spec.dependencies('link'):
self._decrement_ref_count(dep)

# Returns the concrete spec so we know it in the case where a
39 changes: 34 additions & 5 deletions lib/spack/spack/directives.py
@@ -171,7 +171,7 @@ def version(pkg, ver, checksum=None, **kwargs):
pkg.versions[Version(ver)] = kwargs


def _depends_on(pkg, spec, when=None):
def _depends_on(pkg, spec, when=None, type=None):
# If when is False do nothing
if when is False:
return
@@ -180,24 +180,43 @@ def _depends_on(pkg, spec, when=None):
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)

if type is None:
# The default deptype is build and link because the common case is to
# build against a library which then turns into a runtime dependency
# due to the linker.
Member: Looks like an outdated comment. From the description of run and link, I got the impression that run is primarily for extensions of Python and the like. I suppose mpi would be build+link+run?

Member: Probably not, actually. The package itself does not call mpirun at runtime. The user calls that before the app runs. So, MPI is just a build and link dependency of the application. A run dependency would be something like curl for git.

Member: curl and git are probably build dependencies? I think for a distant observer like me it is not clear what run dependencies are in the context of a package manager like Spack. You don't normally run an application using Spack. Maybe this could be clarified in the description of run at the top.

Member: An easier way to think about this is whether, when A -> B, B needs to be in the PATH when you module load A in order for A to work.

Member: If I am not mistaken, run is thereby used exclusively in spack load and should not affect the environment of spack install at all? I think the example you mentioned would help package developers understand the difference, so I would mention it in the description.

Member: Yep. To understand this you have to distinguish runtime from build time. run dependencies are for when a user runs the installed package; build dependencies are for when Spack builds the package. Those are two different environments. This is the same reason why things like Package.setup_environment() distinguish between spack_env and run_env.
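
Writing the examples from this thread out as directives might look like the
following (a hedged sketch; the packages named are just the ones discussed
above, not changes proposed in this PR):

    # An MPI application: MPI is needed to compile and link, but the package
    # itself never invokes mpirun, so 'run' is not included (this is also the
    # default type).
    depends_on('mpi', type=('build', 'link'))

    # git-style case: curl must be available when the installed program runs,
    # i.e. on PATH after the package is module loaded, so it is a run dependency.
    depends_on('curl', type='run')
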

# XXX(deptype): Add 'run' to this? It's an uncommon dependency type,
# but is most backwards-compatible.
type = ('build', 'link')

if isinstance(type, str):
type = (type,)

for deptype in type:
if deptype not in spack.spec.alldeps:
raise UnknownDependencyTypeError('depends_on', pkg.name, deptype)

dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)

pkg_deptypes = pkg._deptypes.setdefault(dep_spec.name, set())
Member: I like setdefault and all but I hate that it's not short-circuiting. Set construction is fast, I suppose, but the biggest current bottleneck in Spack is reading all the package.py files, so every little bit helps. I doubt this is an actual, measurable performance problem, but the tools guy in me looks at it and winces.

Contributor Author: The set is not what I'd be worried about. Instead, it's the two dictionary lookups:

if 'foo' not in d:
    d['foo'] = local = Default()
else:
    local = d['foo']

It would be nice if dictionaries had setdefault_with which took a 0-ary function, but alas…
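
For what it's worth, the helper wished for here is easy to sketch
(setdefault_with is a made-up name, not an actual dict method):

    def setdefault_with(d, key, factory):
        """Like dict.setdefault, but only build the default when the key is
        missing, so nothing is constructed on the common path."""
        try:
            return d[key]
        except KeyError:
            value = d[key] = factory()
            return value

    # e.g. pkg_deptypes = setdefault_with(pkg._deptypes, dep_spec.name, set)
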

for deptype in type:
pkg_deptypes.add(deptype)

conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
else:
conditions[when_spec] = dep_spec


@directive('dependencies')
def depends_on(pkg, spec, when=None):
@directive(('dependencies', '_deptypes'))
def depends_on(pkg, spec, when=None, type=None):
"""Creates a dict of deps with specs defining when they apply."""
_depends_on(pkg, spec, when=when)
_depends_on(pkg, spec, when=when, type=type)


@directive(('extendees', 'dependencies'))
@directive(('extendees', 'dependencies', '_deptypes'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.

@@ -326,3 +345,13 @@ def __init__(self, directive, package):
directive,
"Package '%s' cannot pass itself to %s" % (package, directive))
self.package = package


class UnknownDependencyTypeError(DirectiveError):
"""This is raised when a dependency is of an unknown type."""
def __init__(self, directive, package, deptype):
super(UnknownDependencyTypeError, self).__init__(
directive,
"Package '%s' cannot depend on a package via %s." %
(package, deptype))
self.package = package
4 changes: 2 additions & 2 deletions lib/spack/spack/directory_layout.py
@@ -285,7 +285,7 @@ def check_installed(self, spec):
return path

if spec.dag_hash() == installed_spec.dag_hash():
raise SpecHashCollisionError(installed_hash, spec_hash)
raise SpecHashCollisionError(spec, installed_spec)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match hash!' % spec_file_path)
@@ -431,7 +431,7 @@ class SpecHashCollisionError(DirectoryLayoutError):
def __init__(self, installed_spec, new_spec):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
% installed_spec, new_spec)
% (installed_spec, new_spec))


class RemoveFailedError(DirectoryLayoutError):
10 changes: 6 additions & 4 deletions lib/spack/spack/graph.py
@@ -80,12 +80,14 @@ def topological_sort(spec, **kwargs):

"""
reverse = kwargs.get('reverse', False)
# XXX(deptype): iterate over a certain kind of dependency. Maybe color
# edges based on the type of dependency?
Member: This sounds cool. But don't worry too much about spack graph for now -- we can always make it nicer later.

Contributor Author: Yeah, any of these XXX(deptype) bits can be turned into TODO before merge.

if not reverse:
parents = lambda s: s.dependents
children = lambda s: s.dependencies
parents = lambda s: s.dependents()
children = lambda s: s.dependencies()
else:
parents = lambda s: s.dependencies
children = lambda s: s.dependents
parents = lambda s: s.dependencies()
children = lambda s: s.dependents()

# Work on a copy so this is nondestructive.
spec = spec.copy()
2 changes: 1 addition & 1 deletion lib/spack/spack/modules.py
@@ -120,7 +120,7 @@ def dependencies(spec, request='all'):
return []

if request == 'direct':
return [xx for _, xx in spec.dependencies.items()]
return spec.dependencies()

# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits