improve utility of status line
trehn committed Sep 30, 2017
1 parent 6507050 commit f2bfc5d
Showing 10 changed files with 60 additions and 41 deletions.
22 changes: 10 additions & 12 deletions bundlewrap/bundle.py
@@ -47,23 +47,21 @@ def __lt__(self, other):
return self.name < other.name

@cached_property
@io.job_wrapper(_("{0.node.name} {0.name} parsing bundle"))
def bundle_attrs(self):
if not exists(self.bundle_file):
return {}
else:
with io.job(_(" {node} {bundle} collecting items...").format(
node=self.node.name,
bundle=self.name,
)):
return get_all_attrs_from_file(
self.bundle_file,
base_env={
'node': self.node,
'repo': self.repo,
},
)
return get_all_attrs_from_file(
self.bundle_file,
base_env={
'node': self.node,
'repo': self.repo,
},
)

@cached_property
@io.job_wrapper(_("{0.node.name} {0.name} creating items"))
def items(self):
for item_class in self.repo.item_classes:
for item_name, item_attrs in self.bundle_attrs.get(
@@ -91,7 +89,7 @@ def make_item(self, attribute_name, item_name, item_attrs):

@cached_property
def metadata_processors(self):
with io.job(_(" {node} {bundle} collecting metadata processors...").format(
with io.job(_("{node} {bundle} collecting metadata processors").format(
node=self.node.name,
bundle=self.name,
)):
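The decorators added above rely on the new IOManager.job_wrapper() introduced in the utils/ui.py hunk at the end of this diff: the job label is built by formatting the given string with the wrapped callable's own arguments, so {0} is the bound instance for methods and {1} is the second positional argument (which is why prepare_dependencies() in deps.py gains a node_name parameter further down). Below is a minimal, self-contained sketch of that mechanism; MiniIO, node1 and mybundle are illustrative stand-ins, not real bundlewrap objects.

```python
from contextlib import contextmanager


class MiniIO(object):
    """Stripped-down stand-in for bundlewrap's IOManager."""

    @contextmanager
    def job(self, job_text):
        print("JOB: " + job_text)  # the real class paints this into the status line instead
        yield

    def job_wrapper(self, job_text):
        # Same shape as the decorator added to bundlewrap/utils/ui.py below:
        # the label is formatted from the wrapped function's arguments.
        def outer_wrapper(wrapped_function):
            def inner_wrapper(*args, **kwargs):
                with self.job(job_text.format(*args, **kwargs)):
                    return wrapped_function(*args, **kwargs)
            return inner_wrapper
        return outer_wrapper


io = MiniIO()


class Node(object):
    def __init__(self, name):
        self.name = name


class Bundle(object):
    def __init__(self, node, name):
        self.node = node
        self.name = name

    @io.job_wrapper("{0.node.name} {0.name} parsing bundle")
    def bundle_attrs(self):  # {0} is self, so the label reads "node1 mybundle parsing bundle"
        return {}


Bundle(Node("node1"), "mybundle").bundle_attrs()
```

Running the sketch prints "JOB: node1 mybundle parsing bundle", which is the kind of label the real status line now shows while a bundle is being parsed.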
11 changes: 5 additions & 6 deletions bundlewrap/cmdline/test.py
@@ -17,15 +17,14 @@


def test_items(nodes, ignore_missing_faults):
with io.job(_(" counting items...")):
io.progress_set_total(count_items(nodes))
io.progress_set_total(count_items(nodes))
for node in nodes:
if QUIT_EVENT.is_set():
break
if not node.items:
io.stdout(_("{x} {node} has no items").format(node=bold(node.name), x=yellow("!")))
continue
item_queue = ItemTestQueue(node.items, node.os, node.os_version)
item_queue = ItemTestQueue(node.items, node.name, node.os, node.os_version)
while not QUIT_EVENT.is_set():
try:
item = item_queue.pop()
@@ -86,7 +85,7 @@ def test_subgroup_loops(repo):
break
if group in checked_groups:
continue
with io.job(_(" {group} checking for subgroup loops...").format(group=group.name)):
with io.job(_("{group} checking for subgroup loops").format(group=group.name)):
checked_groups.extend(group.subgroups) # the subgroups property has the check built in
io.stdout(_("{x} {group} has no subgroup loops").format(
x=green("✓"),
@@ -95,7 +94,7 @@ def test_subgroup_loops(repo):


def test_metadata_collisions(node):
with io.job(_(" {node} checking for metadata collisions...").format(node=node.name)):
with io.job(_("{node} checking for metadata collisions").format(node=node.name)):
check_for_unsolvable_metadata_key_conflicts(node)
io.stdout(_("{x} {node} has no metadata collisions").format(
x=green("✓"),
@@ -221,9 +220,9 @@ def test_determinism_metadata(repo, nodes, iterations):
iteration_repo = Repository(repo.path)
iteration_nodes = [iteration_repo.get_node(node.name) for node in nodes]
for node in iteration_nodes:
with io.job(_(" {node} generating metadata ({i}/{n})... ").format(
if QUIT_EVENT.is_set():
break
with io.job(_("{node} generating metadata ({i}/{n})").format(
i=i + 1,
n=iterations,
node=node.name,
3 changes: 2 additions & 1 deletion bundlewrap/deps.py
@@ -554,7 +554,8 @@ def _inject_preceded_by_dependencies(items):
return items


def prepare_dependencies(items, node_os, node_os_version):
@io.job_wrapper(_("{1} processing dependencies"))
def prepare_dependencies(items, node_name, node_os, node_os_version):
"""
Performs all dependency preprocessing on a list of items.
"""
4 changes: 2 additions & 2 deletions bundlewrap/itemqueue.py
@@ -15,8 +15,8 @@


class BaseQueue(object):
def __init__(self, items, node_os, node_os_version):
self.items_with_deps = prepare_dependencies(items, node_os, node_os_version)
def __init__(self, items, node_name, node_os, node_os_version):
self.items_with_deps = prepare_dependencies(items, node_name, node_os, node_os_version)
self.items_without_deps = []
self._split()
self.pending_items = []
17 changes: 11 additions & 6 deletions bundlewrap/items/__init__.py
@@ -319,9 +319,14 @@ def _skip_with_soft_locks(self, mine, others):
return False

def _test(self):
if self._faults_missing_for_attributes:
self._raise_for_faults()
return self.test()
with io.job(_("{node} {bundle} {item}").format(
bundle=self.bundle.name,
item=self.id,
node=self.node.name,
)):
if self._faults_missing_for_attributes:
self._raise_for_faults()
return self.test()

@classmethod
def _validate_attribute_names(cls, bundle, item_id, attributes):
@@ -480,7 +485,7 @@ def apply(
if status_code is None:
keys_to_fix = status_before.keys_to_fix
if not interactive:
with io.job(_(" {node} {bundle} {item} fixing...").format(
with io.job(_("{node} {bundle} {item}").format(
bundle=self.bundle.name,
item=self.id,
node=self.node.name,
@@ -518,7 +523,7 @@ def apply(
),
)
if answer:
with io.job(_(" {node} {bundle} {item} fixing...").format(
with io.job(_("{node} {bundle} {item}").format(
bundle=self.bundle.name,
item=self.id,
node=self.node.name,
@@ -632,7 +637,7 @@ def get_status(self, cached=True):
Returns an ItemStatus instance describing the current status of
the item on the actual node.
"""
with io.job(_(" {node} {bundle} {item} checking...").format(
with io.job(_("{node} {bundle} {item}").format(
bundle=self.bundle.name,
item=self.id,
node=self.node.name,
4 changes: 2 additions & 2 deletions bundlewrap/items/actions.py
@@ -80,7 +80,7 @@ def _get_result(
return (self.STATUS_SKIPPED, [_("not triggered")])

if self.unless:
with io.job(_(" {node} {bundle} {item} checking 'unless' condition...").format(
with io.job(_("{node} {bundle} {item} checking 'unless' condition").format(
bundle=self.bundle.name,
item=self.id,
node=self.node.name,
@@ -171,7 +171,7 @@ def run(self):
else:
data_stdin = None

with io.job(_(" {node} {bundle} {item} running...").format(
with io.job(_("{node} {bundle} RUN {item}").format(
bundle=self.bundle.name,
item=self.id,
node=self.node.name,
8 changes: 4 additions & 4 deletions bundlewrap/lock.py
@@ -38,7 +38,7 @@ def __init__(self, node, interactive=False, ignore=False):
def __enter__(self):
with tempfile() as local_path:
if not self.ignore:
with io.job(_(" {node} checking hard lock status...").format(node=self.node.name)):
with io.job(_("{node} checking hard lock status").format(node=self.node.name)):
result = self.node.run("mkdir " + quote(HARD_LOCK_PATH), may_fail=True)
if result.return_code != 0:
self.node.download(HARD_LOCK_FILE, local_path)
@@ -80,7 +80,7 @@ def __enter__(self):
else:
raise NodeLockedException(info)

with io.job(_(" {node} uploading lock file...").format(node=self.node.name)):
with io.job(_("{node} uploading lock file").format(node=self.node.name)):
if self.ignore:
self.node.run("mkdir -p " + quote(HARD_LOCK_PATH))
with open(local_path, 'w') as f:
@@ -93,7 +93,7 @@ def __enter__(self):
return self

def __exit__(self, type, value, traceback):
with io.job(_(" {node} removing hard lock...").format(node=self.node.name)):
with io.job(_("{node} removing hard lock").format(node=self.node.name)):
result = self.node.run("rm -R {}".format(quote(HARD_LOCK_PATH)), may_fail=True)

if result.return_code != 0:
@@ -168,7 +168,7 @@ def softlock_add(node, lock_id, comment="", expiry="8h", item_selectors=None):


def softlock_list(node):
with io.job(_(" {} checking soft locks...").format(node.name)):
with io.job(_("{} checking soft locks").format(node.name)):
cat = node.run("cat {}".format(SOFT_LOCK_FILE.format(id="*")), may_fail=True)
if cat.return_code != 0:
return []
7 changes: 3 additions & 4 deletions bundlewrap/node.py
@@ -116,9 +116,7 @@ def apply_items(
workers=1,
interactive=False,
):
with io.job(_(" {node} processing dependencies...").format(node=node.name)):
item_queue = ItemQueue(node.items, node.os, node.os_version)

item_queue = ItemQueue(node.items, node.name, node.os, node.os_version)
results = []

def tasks_available():
@@ -363,7 +361,7 @@ def __repr__(self):

@cached_property
def bundles(self):
with io.job(_(" {node} loading bundles...").format(node=self.name)):
with io.job(_("{node} loading bundles").format(node=self.name)):
added_bundles = []
found_bundles = []
for group in self.groups:
@@ -410,6 +408,7 @@ def group_membership_hash(self):
return hash_statedict(sorted(names(self.groups)))

@cached_property
@io.job_wrapper(_("{0.name} determining groups"))
def groups(self):
_groups = set(self.repo._static_groups_for_node(self))
# lock to avoid infinite recursion when .members_add/remove
6 changes: 3 additions & 3 deletions bundlewrap/repo.py
@@ -478,15 +478,15 @@ def _build_node_metadata(self):
else:
self._node_metadata_static_complete.add(node_name)

with io.job(_(" {node} building group metadata...").format(node=node.name)):
with io.job(_("{node} building group metadata").format(node=node.name)):
group_order = _flatten_group_hierarchy(node.groups)
for group_name in group_order:
self._node_metadata_partial[node.name] = merge_dict(
self._node_metadata_partial[node.name],
self.get_group(group_name).metadata,
)

with io.job(_(" {node} merging node metadata...").format(node=node.name)):
with io.job(_("{node} merging node metadata").format(node=node.name)):
# deepcopy_metadata is important here because up to this point
# different nodes from the same group might still share objects
# nested deeply in their metadata. This becomes a problem if we
@@ -507,7 +507,7 @@
if QUIT_EVENT.is_set():
break
node = self.get_node(node_name)
with io.job(_(" {node} running metadata processors...").format(node=node.name)):
with io.job(_("{node} running metadata processors").format(node=node.name)):
for metadata_processor_name, metadata_processor in node.metadata_processors:
if (node_name, metadata_processor_name) in blacklisted_metaprocs:
continue
19 changes: 18 additions & 1 deletion bundlewrap/utils/ui.py
@@ -321,6 +321,14 @@ def job(self, job_text):
finally:
self.job_del(job_text)

def job_wrapper(self, job_text):
def outer_wrapper(wrapped_function):
def inner_wrapper(*args, **kwargs):
with self.job(job_text.format(*args, **kwargs)):
return wrapped_function(*args, **kwargs)
return inner_wrapper
return outer_wrapper

def _clear_last_job(self):
if self.jobs and TTY:
write_to_stream(STDOUT_WRITER, "\r\033[K")
@@ -365,6 +373,15 @@ def _write(self, msg, append_newline=True, err=False):

def _write_current_job(self):
if self.jobs and TTY:
write_to_stream(STDOUT_WRITER, inverse("{} ".format(self.jobs[-1])[:term_width() - 1]))
line = " "
try:
progress = (self.progress / float(self.progress_total))
except ZeroDivisionError:
pass
else:
line += "{:.1f}% ".format(progress * 100)
line += self.jobs[-1]
line += " "
write_to_stream(STDOUT_WRITER, inverse(line[:term_width() - 1]))

io = IOManager()
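The other half of the change is the status line itself: _write_current_job() now prefixes the active job with a completion percentage whenever a progress total is known. Here is a rough standalone rendition of that formatting logic; render_status_line and the sample job text are made up for illustration, and the real method additionally wraps the result in inverse() and writes it via write_to_stream() as shown above.

```python
def render_status_line(current_job, progress, progress_total, width=80):
    """Build the status line the way the patched _write_current_job() does."""
    line = " "
    try:
        fraction = progress / float(progress_total)
    except ZeroDivisionError:
        pass  # no total known yet: fall back to the bare job text
    else:
        line += "{:.1f}% ".format(fraction * 100)
    line += current_job
    line += " "
    return line[:width - 1]


print(repr(render_status_line("node1 mybundle pkg_apt:vim", 12, 40)))
# ' 30.0% node1 mybundle pkg_apt:vim '
```

Together with the io.progress_set_total(count_items(nodes)) call that bw test now issues outside of any job (see the cmdline/test.py hunk above), this turns the status line from a bare job label into a rough overall progress indicator.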
