Skip to content

Commit

Permalink
Add and write timers for top level install
Browse files Browse the repository at this point in the history
  • Loading branch information
kwryankrattiger committed Jul 11, 2023
1 parent 9925546 commit 00463bf
Show file tree
Hide file tree
Showing 6 changed files with 71 additions and 25 deletions.
1 change: 1 addition & 0 deletions lib/spack/spack/cmd/ci.py
Original file line number Diff line number Diff line change
Expand Up @@ -523,6 +523,7 @@ def ci_rebuild(args):

# Arguments when installing the root from sources
root_install_args = install_args + [
"--timer-summary-file=$PWD/install_timers.json",
"--keep-stage",
"--only=package",
"--use-buildcache=package:never,dependencies:only",
Expand Down
4 changes: 4 additions & 0 deletions lib/spack/spack/cmd/install.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ def install_kwargs_from_args(args):
"unsigned": args.unsigned,
"install_deps": ("dependencies" in args.things_to_install),
"install_package": ("package" in args.things_to_install),
"timer_summary_file": args.timer_summary_file,
}


Expand All @@ -89,6 +90,9 @@ def setup_parser(subparser):
help="phase to stop after when installing (default None)",
)
arguments.add_common_arguments(subparser, ["jobs"])
subparser.add_argument(
"--timer-summary-file", help="File to record the top level install timer summary"
)
subparser.add_argument(
"--overwrite",
action="store_true",
Expand Down
31 changes: 30 additions & 1 deletion lib/spack/spack/installer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1613,11 +1613,14 @@ def install(self):
# enabled, so that the output does not get cluttered.
term_status = TermStatusLine(enabled=sys.stdout.isatty() and not tty.is_debug())

t = timer.Timer()

while self.build_pq:
task = self._pop_task()
if task is None:
continue

t.start("task")
spack.hooks.on_install_start(task.request.pkg.spec)
install_args = task.request.install_args
keep_prefix = install_args.get("keep_prefix")
Expand Down Expand Up @@ -1663,6 +1666,7 @@ def install(self):
# some package likely depends on it.
if not task.explicit:
if _handle_external_and_upstream(pkg, False):
t.stop("task")
term_status.clear()
self._flag_installed(pkg, task.dependents)
continue
Expand All @@ -1682,6 +1686,7 @@ def install(self):
if self.fail_fast:
raise InstallError(fail_fast_err, pkg=pkg)

t.stop("task")
continue

# Attempt to get a write lock. If we can't get the lock then
Expand All @@ -1690,18 +1695,20 @@ def install(self):
# other process may be hung).
term_title.set("Acquiring lock for {0}".format(pkg.name))
term_status.add(pkg_id)
t.start("aquire_lock")
ltype, lock = self._ensure_locked("write", pkg)
if lock is None:
# Attempt to get a read lock instead. If this fails then
# another process has a write lock so must be (un)installing
# the spec (or that process is hung).
ltype, lock = self._ensure_locked("read", pkg)

t.stop("aquire_lock")
# Requeue the spec if we cannot get at least a read lock so we
# can check the status presumably established by another process
# -- failed, installed, or uninstalled -- on the next pass.
if lock is None:
self._requeue_task(task)
t.stop("task")
continue

term_status.clear()
Expand All @@ -1720,7 +1727,9 @@ def install(self):
# Downgrade to a read lock to preclude other processes from
# uninstalling the package until we're done installing its
# dependents.
t.start("aquire_lock")
ltype, lock = self._ensure_locked("read", pkg)
t.stop("aquire_lock")
if lock is not None:
self._update_installed(task)
path = spack.util.path.debug_padded_filter(pkg.prefix)
Expand All @@ -1741,6 +1750,7 @@ def install(self):
# or uninstalled -- on the next pass.
self.installed.remove(pkg_id)
self._requeue_task(task)
t.stop("task")
continue

# Having a read lock on an uninstalled pkg may mean another
Expand All @@ -1754,10 +1764,12 @@ def install(self):
if ltype == "read":
lock.release_read()
self._requeue_task(task)
t.stop("task")
continue

# Proceed with the installation since we have an exclusive write
# lock on the package.
t.start("install")
term_title.set("Installing {0}".format(pkg.name))
try:
action = self._install_action(task)
Expand All @@ -1775,6 +1787,7 @@ def install(self):
keep_prefix = keep_prefix or (stop_before_phase is None and last_phase is None)

except KeyboardInterrupt as exc:
t.stop("install")
# The build has been terminated with a Ctrl-C so terminate
# regardless of the number of remaining specs.
err = "Failed to install {0} due to {1}: {2}"
Expand All @@ -1783,6 +1796,7 @@ def install(self):
raise

except binary_distribution.NoChecksumException as exc:
t.stop("install")
if task.cache_only:
raise

Expand All @@ -1792,9 +1806,11 @@ def install(self):
tty.error(err.format(pkg.name, str(exc)))
task.use_cache = False
self._requeue_task(task)
t.stop("task")
continue

except (Exception, SystemExit) as exc:
t.stop("install")
self._update_failed(task, True, exc)
spack.hooks.on_install_failure(task.request.pkg.spec)

Expand Down Expand Up @@ -1824,6 +1840,7 @@ def install(self):
failed_explicits.append((pkg, pkg_id, str(exc)))

finally:
t.stop("install")
# Remove the install prefix if anything went wrong during
# install.
if not keep_prefix and not action == InstallAction.OVERWRITE:
Expand All @@ -1837,6 +1854,7 @@ def install(self):
# Perform basic task cleanup for the installed spec to
# include downgrading the write to a read lock
self._cleanup_task(pkg)
t.stop("task")

# Cleanup, which includes releasing all of the read locks
self._cleanup_all_tasks()
Expand All @@ -1849,6 +1867,17 @@ def install(self):
if request.install_args.get("install_package") and request.pkg_id not in self.installed
]

t.stop()

phases = ["{}: {}.".format(p.capitalize(), _hms(t.duration(p))) for p in t.phases]
phases.append("Total: {}".format(_hms(t.duration())))
tty.msg("Installer time summary", " ".join(phases))

timer_summary_file = self.build_requests[0].install_args.get("timer_summary_file")
if timer_summary_file:
with open(timer_summary_file, "w") as fd:
t.write_json(out=fd, extra_attributes={"name": "install_all"})

if failed_explicits or missing:
for _, pkg_id, err in failed_explicits:
tty.error("{0}: {1}".format(pkg_id, err))
Expand Down
33 changes: 27 additions & 6 deletions lib/spack/spack/util/timer.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import spack.util.spack_json as sjson

TimerEvent = collections.namedtuple("TimerEvent", ("time", "running", "label"))
TimeTracker = collections.namedtuple("TimeTracker", ("total", "start"))
TimeTracker = collections.namedtuple("TimeTracker", ("total", "start", "count", "path"))

#: name for the global timer (used in start(), stop(), duration() without arguments)
global_timer_name = "_global"
Expand Down Expand Up @@ -141,15 +141,27 @@ def _flatten(self):
if event.running:
if event.label not in self._timer_stack:
self._timer_stack.append(event.label)
tracker = self._timers.get(event.label, TimeTracker(0.0, event.time))
self._timers[event.label] = TimeTracker(tracker.total, event.time)
# Only start the timer if it is on top of the stack
# restart doesn't work after a subtimer is started
if event.label == self._timer_stack[-1]:
timer_path = "/".join(self._timer_stack[1:])
tracker = self._timers.get(
event.label, TimeTracker(0.0, event.time, 0, timer_path)
)
assert tracker.path == timer_path
self._timers[event.label] = TimeTracker(
tracker.total, event.time, tracker.count, tracker.path
)
else: # if not event.running:
if event.label in self._timer_stack:
index = self._timer_stack.index(event.label)
for label in self._timer_stack[index:]:
tracker = self._timers[label]
self._timers[label] = TimeTracker(
tracker.total + (event.time - tracker.start), None
tracker.total + (event.time - tracker.start),
None,
tracker.count + 1,
tracker.path,
)
self._timer_stack = self._timer_stack[: max(0, index)]
# clear events
def write_json(self, out=sys.stdout, extra_attributes=None):
    """Write a machine-readable JSON summary of all recorded timers.

    Args:
        out: file-like object the JSON is written to. If falsy, nothing
            is written and the summary dict is returned instead.
        extra_attributes: optional mapping merged into the top level of
            the summary (e.g. ``{"name": "install_all"}``).

    Returns:
        The summary dict when ``out`` is falsy, otherwise ``None``.
    """
    data = {
        "total": self._timers[global_timer_name].total,
        "phases": [
            {
                "name": phase,
                "path": self._timers[phase].path,
                "seconds": self._timers[phase].total,
                "count": self._timers[phase].count,
            }
            for phase in self.phases
        ],
    }
    # ``extra_attributes`` defaults to None rather than a mutable {} to
    # avoid the shared-mutable-default pitfall; callers see no change.
    if extra_attributes:
        data.update(extra_attributes)
    if out:
        out.write(sjson.dump(data))
    else:
        return data

def write_tty(self, out=sys.stdout):
"""Write a human-readable summary of timings (depth is 1)"""
Expand Down
2 changes: 1 addition & 1 deletion share/spack/gitlab/cloud_pipelines/configs/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ ci:
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
- - spack python ${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts/common/aggregate_package_logs.spack.py
--config ${SPACK_CONCRETE_ENV_DIR}/spack.yaml
--prefix /home/software/spack,${CI_PROJECT_DIR}/opt/spack,${PWD}
--log install_times.json
${SPACK_ARTIFACTS_ROOT}/user_data/install_times.json
after_script:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,31 +17,22 @@ def find_logs(prefix, filename):
import json
import os
from argparse import ArgumentParser

parser = ArgumentParser("aggregate_logs")
parser.add_argument("output_file")
parser.add_argument("--log", default="install_times.json")
parser.add_argument("--prefix", required=True)

args = parser.parse_args()

# The CI pipeline passes a comma-separated prefix list
# (``--prefix /home/software/spack,${CI_PROJECT_DIR}/opt/spack,${PWD}``),
# but splitting only on ":" would leave the whole argument as a single
# nonexistent path and silently aggregate nothing. Accept both ","
# (as used by ci.yaml) and ":" (PATH-style) as separators.
prefixes = [
    p for p in args.prefix.replace(",", ":").split(":") if os.path.exists(p)
]

# Aggregate the per-package install timer logs into a single JSON list.
data = []
for prefix in prefixes:
    for log in find_logs(prefix, args.log):
        with open(log) as fd:
            data.append(json.load(fd))

with open(args.output_file, "w") as fd:
    json.dump(data, fd)

0 comments on commit 00463bf

Please sign in to comment.