
Extract the `Digest.load`/`Digest.dump` methods from the RSC task and reuse in SimpleCodegenTask.
stuhood committed Feb 13, 2019
1 parent 24fbf78 commit aa70f0745c51fbf265430adf62d0b97f2421c1c0
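
For orientation before the diff: what rsc.py previously did with free `write_digest`/`load_digest` functions becomes `dump`/`load`/`clear` methods on `Digest`, persisting a `fingerprint:length` pair in a `.digest` file adjacent to the output directory. The following is a minimal, self-contained sketch of that behaviour only; `FakeDigest` and the temp paths are illustrative stand-ins, not the Pants implementation.

import os
import tempfile


class FakeDigest(object):
  """Illustrative stand-in for pants.engine.fs.Digest with the new helpers."""

  def __init__(self, fingerprint, serialized_bytes_length):
    self.fingerprint = fingerprint
    self.serialized_bytes_length = serialized_bytes_length

  @classmethod
  def _path(cls, directory):
    # The digest file sits adjacent to the directory: `out/` -> `out.digest`.
    return '{}.digest'.format(directory.rstrip(os.sep))

  def dump(self, directory):
    with open(self._path(directory), 'w') as f:
      f.write('{}:{}'.format(self.fingerprint, self.serialized_bytes_length))

  @classmethod
  def load(cls, directory):
    # None (rather than an error) signals that no digest was dumped previously.
    if not os.path.isfile(cls._path(directory)):
      return None
    with open(cls._path(directory)) as f:
      fingerprint, length = f.read().split(':')
    return cls(fingerprint, int(length))

  @classmethod
  def clear(cls, directory):
    if os.path.isfile(cls._path(directory)):
      os.unlink(cls._path(directory))


out_dir = os.path.join(tempfile.mkdtemp(), 'out')
os.mkdir(out_dir)
FakeDigest('deadbeef', 42).dump(out_dir)                 # writes `.../out.digest`
print(FakeDigest.load(out_dir).serialized_bytes_length)  # 42
FakeDigest.clear(out_dir)
print(FakeDigest.load(out_dir))                          # None
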
@@ -33,8 +33,7 @@
from pants.java.jar.jar_dependency import JarDependency
from pants.reporting.reporting_utils import items_to_report_element
from pants.util.contextutil import Timer
from pants.util.dirutil import (fast_relpath, fast_relpath_optional, maybe_read_file,
safe_file_dump, safe_mkdir)
from pants.util.dirutil import fast_relpath, fast_relpath_optional, safe_mkdir
from pants.util.memo import memoized_property


@@ -60,22 +59,6 @@ def stdout_contents(wu):
return f.read().rstrip()


def write_digest(output_dir, digest):
safe_file_dump(
'{}.digest'.format(output_dir),
mode='w',
payload='{}:{}'.format(digest.fingerprint, digest.serialized_bytes_length))


def load_digest(output_dir):
read_file = maybe_read_file('{}.digest'.format(output_dir), binary_mode=False)
if read_file:
fingerprint, length = read_file.split(':')
return Digest(fingerprint, int(length))
else:
return None


def _create_desandboxify_fn(possible_path_patterns):
# Takes a collection of possible canonical prefixes, and returns a function that,
# if it finds a matching prefix, strips the portion of the path prior to the prefix and returns it
@@ -132,7 +115,7 @@ def __init__(self, *args, **kwargs):

@classmethod
def implementation_version(cls):
return super(RscCompile, cls).implementation_version() + [('RscCompile', 171)]
return super(RscCompile, cls).implementation_version() + [('RscCompile', 172)]

@classmethod
def register_options(cls, register):
@@ -218,7 +201,7 @@ def pathglob_for(filename):
def to_classpath_entries(paths, scheduler):
# list of path ->
# list of (path, optional<digest>) ->
path_and_digests = [(p, load_digest(os.path.dirname(p))) for p in paths]
path_and_digests = [(p, Digest.load(os.path.dirname(p))) for p in paths]
# partition: list of path, list of tuples
paths_without_digests = [p for (p, d) in path_and_digests if not d]
if paths_without_digests:
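
The hunk above is cut off at the hunk boundary; the surrounding pattern, roughly, is: pair each classpath path with any digest previously dumped next to it, and only snapshot the paths that have no persisted digest. A simplified sketch under that assumption; `snapshot_paths` is a hypothetical stand-in for the scheduler call that follows in the real method:

import os

from pants.engine.fs import Digest


def hydrate_digests(paths, snapshot_paths):
  # Pair every path with the digest dumped next to its directory, if any.
  path_and_digests = [(p, Digest.load(os.path.dirname(p))) for p in paths]
  # Only paths with no persisted digest need a fresh snapshot.
  paths_without_digests = [p for (p, d) in path_and_digests if not d]
  fresh = snapshot_paths(paths_without_digests) if paths_without_digests else {}
  # Prefer the persisted digest; fall back to the freshly captured one.
  return [(p, d if d else fresh.get(p)) for (p, d) in path_and_digests]
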
@@ -825,7 +808,7 @@ def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input
raise TaskError(res.stderr)

if output_dir:
write_digest(output_dir, res.output_directory_digest)
res.output_directory_digest.dump(output_dir)
self.context._scheduler.materialize_directories((
DirectoryToMaterialize(
# NB the first element here is the root to materialize into, not the dir to snapshot
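
The point of dumping here is that a later pants run (or another task) can recover the digest with `Digest.load(output_dir)` instead of re-snapshotting the materialized directory. A simplified sketch of that flow; `run_hermetically` and `materialize` are hypothetical stand-ins for the engine's process-execution and materialization APIs:

def run_and_record(output_dir, run_hermetically, materialize):
  res = run_hermetically()
  if output_dir:
    # Persist the output digest adjacent to the directory so later runs can
    # call Digest.load(output_dir) rather than hashing the directory again.
    res.output_directory_digest.dump(output_dir)
    materialize(output_dir, res.output_directory_digest)
  return res
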
@@ -4,12 +4,15 @@

from __future__ import absolute_import, division, print_function, unicode_literals

import os

from future.utils import binary_type, text_type

from pants.engine.objects import Collection
from pants.engine.rules import RootRule
from pants.option.custom_types import GlobExpansionConjunction
from pants.option.global_options import GlobMatchErrorBehavior
from pants.util.dirutil import maybe_read_file, safe_delete, safe_file_dump
from pants.util.objects import Exactly, datatype


@@ -79,6 +82,33 @@ class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', i
https://github.com/pantsbuild/pants/issues/5802
"""

@classmethod
def _path(cls, directory):
return '{}.digest'.format(directory.rstrip(os.sep))

@classmethod
def clear(cls, directory):
"""Clear any existing Digest file adjacent to the given directory."""
safe_delete(cls._path(directory))

@classmethod
def load(cls, directory):
"""Load a Digest from a `.digest` file adjacent to the given directory.
:return: A Digest, or None if the Digest did not exist.
"""
read_file = maybe_read_file(cls._path(directory), binary_mode=False)
if read_file:
fingerprint, length = read_file.split(':')
return Digest(fingerprint, int(length))
else:
return None

def dump(self, directory):
"""Dump this Digest object adjacent to the given directory."""
payload = '{}:{}'.format(self.fingerprint, self.serialized_bytes_length)
safe_file_dump(self._path(directory), payload=payload, mode='w')

def __repr__(self):
return '''Digest(fingerprint={}, serialized_bytes_length={})'''.format(
self.fingerprint,
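
A usage sketch of the new methods (assuming a Pants checkout where `pants.engine.fs.Digest` is importable and running under Python 3; the fingerprint value and temp paths are made up):

import os
import tempfile

from pants.engine.fs import Digest

out_dir = os.path.join(tempfile.mkdtemp(), 'out')
os.makedirs(out_dir)

digest = Digest('ab' * 32, 137)        # (fingerprint, serialized_bytes_length)
digest.dump(out_dir)                   # writes `<...>/out.digest` as 'fingerprint:length'
assert os.path.isfile('{}.digest'.format(out_dir))

assert Digest.load(out_dir) == digest  # load() returns None if no file exists

Digest.clear(out_dir)                  # safe_delete: no error if already gone
assert Digest.load(out_dir) is None
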
@@ -18,11 +18,11 @@
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.address import Address
from pants.build_graph.address_lookup_error import AddressLookupError
from pants.engine.fs import PathGlobs, PathGlobsAndRoot
from pants.engine.fs import Digest, PathGlobs, PathGlobsAndRoot
from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper
from pants.task.task import Task
from pants.util.collections_abc_backport import OrderedDict
from pants.util.dirutil import safe_delete
from pants.util.dirutil import fast_relpath, safe_delete


logger = logging.getLogger(__name__)
@@ -113,6 +113,10 @@ def synthetic_target_extra_dependencies(self, target, target_workdir):
"""
return []

@classmethod
def implementation_version(cls):
return super(SimpleCodegenTask, cls).implementation_version() + [('SimpleCodegenTask', 2)]

def synthetic_target_extra_exports(self, target, target_workdir):
"""Gets any extra exports generated synthetic targets should have.
@@ -206,7 +210,7 @@ def _do_validate_sources_present(self, target):

def _get_synthetic_address(self, target, target_workdir):
synthetic_name = target.id
sources_rel_path = os.path.relpath(target_workdir, get_buildroot())
sources_rel_path = fast_relpath(target_workdir, get_buildroot())
synthetic_address = Address(sources_rel_path, synthetic_name)
return synthetic_address
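
`fast_relpath` is a stricter, cheaper replacement for `os.path.relpath` here: it only strips the root prefix, so it never fabricates `..` segments and instead raises if the path does not actually live under the root, which is the right behaviour for a workdir that is always inside the buildroot. A quick illustration with made-up paths:

import os

from pants.util.dirutil import fast_relpath

buildroot = '/repo'
workdir = '/repo/.pants.d/gen/protoc/foo'

print(os.path.relpath(workdir, buildroot))  # .pants.d/gen/protoc/foo
print(fast_relpath(workdir, buildroot))     # .pants.d/gen/protoc/foo

# Outside the root, os.path.relpath silently escapes via '..' components,
# while fast_relpath raises ValueError instead.
print(os.path.relpath('/elsewhere/bar', buildroot))  # ../elsewhere/bar
# fast_relpath('/elsewhere/bar', buildroot)          # -> ValueError
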

@@ -230,7 +234,8 @@ def execute(self):
with self.context.new_workunit(name='execute', labels=[WorkUnitLabel.MULTITOOL]):
vts_to_sources = OrderedDict()
for vt in invalidation_check.all_vts:
synthetic_target_dir = self.synthetic_target_dir(vt.target, vt.results_dir)

synthetic_target_dir = self.synthetic_target_dir(vt.target, vt.current_results_dir)

key = (vt, synthetic_target_dir)
vts_to_sources[key] = None
@@ -243,6 +248,7 @@ def execute(self):
# _handle_duplicate_sources may delete files from the filesystem, so we need to
# re-capture the sources.
if not self._handle_duplicate_sources(vt.target, vt.results_dir, sources):
Digest.clear(synthetic_target_dir)
vts_to_sources[key] = sources
vt.update()
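
The new `Digest.clear` call protects the persisted digest's invariant: the `.digest` file next to a results dir must describe what is currently on disk, because `_capture_sources` (below) hands that digest back to the engine as a shortcut. If duplicate-source handling deletes files after a capture, the dumped digest is stale and must be removed so the next capture re-hashes the directory. A generic sketch of that guard; the helper names are illustrative and do not mirror the exact branch structure of this method:

def recapture_if_modified(synthetic_target_dir, capture, mutate_sources):
  sources = capture(synthetic_target_dir)  # dumps a digest as a side effect
  if mutate_sources():                     # e.g. duplicate sources were deleted
    # On-disk contents changed after the dump: drop the stale digest so the
    # next capture cannot short-circuit on it.
    Digest.clear(synthetic_target_dir)
    sources = capture(synthetic_target_dir)
  return sources
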

@@ -290,7 +296,7 @@ def _capture_sources(self, targets_and_dirs):
for target, synthetic_target_dir in targets_and_dirs:
files = self.sources_globs

results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot())
results_dir_relpath = fast_relpath(synthetic_target_dir, get_buildroot())
buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files)
buildroot_relative_excludes = tuple(
os.path.join(results_dir_relpath, file)
@@ -300,13 +306,17 @@ def _capture_sources(self, targets_and_dirs):
PathGlobsAndRoot(
PathGlobs(buildroot_relative_globs, buildroot_relative_excludes),
text_type(get_buildroot()),
Digest.load(synthetic_target_dir),
)
)
results_dirs.append(results_dir_relpath)
filespecs.append(FilesetRelPathWrapper.to_filespec(buildroot_relative_globs))

snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture))

for snapshot, (_, synthetic_target_dir) in zip(snapshots, targets_and_dirs):
snapshot.directory_digest.dump(synthetic_target_dir)

return tuple(EagerFilesetWithSpec(
results_dir_relpath,
filespec,
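
Putting the `_capture_sources` changes together: each results dir's previously dumped digest (if any) is passed as a hint so the engine can avoid re-snapshotting an unchanged dir, and the freshly captured digests are dumped for the next run. A simplified sketch with the scheduler and glob plumbing reduced to stand-ins:

from pants.engine.fs import Digest, PathGlobsAndRoot


def capture_and_record(scheduler, buildroot, dirs_and_globs):
  to_capture = []
  for synthetic_target_dir, globs in dirs_and_globs:
    to_capture.append(PathGlobsAndRoot(
      globs,
      buildroot,
      # A previously dumped digest lets the engine reuse the old snapshot;
      # None means "snapshot this directory from scratch".
      Digest.load(synthetic_target_dir),
    ))

  snapshots = scheduler.capture_snapshots(tuple(to_capture))

  # Persist each fresh digest next to its dir so the next run can reuse it.
  for snapshot, (synthetic_target_dir, _) in zip(snapshots, dirs_and_globs):
    snapshot.directory_digest.dump(synthetic_target_dir)
  return snapshots
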
