Add a flag for when processing time cannot be fully calculated due to lack of data
sarah256 committed Aug 7, 2019
1 parent d434a83 commit 28ec4d8
Showing 6 changed files with 57 additions and 15 deletions.
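
In short, BaseStoryManager.get_total_processing_time now returns a tuple rather than a bare number: the total processing seconds plus a boolean flag that is True when artifacts had to be skipped for missing timestamps. A minimal sketch of the new call pattern, assuming results is a list of inflated Neo4j nodes as used throughout the diffs below (the logger name and warning text here are illustrative, not part of the commit):

    import logging

    from estuary.utils.story import BaseStoryManager

    log = logging.getLogger('estuary')

    results = []  # inflated Neo4j nodes for the story, as elsewhere in this commit
    base_instance = BaseStoryManager()

    # New contract: (seconds, flag) instead of just seconds.
    total_processing_time, flag = base_instance.get_total_processing_time(results)
    if flag:
        # Some artifacts lacked creation or completion times, so the total is a
        # lower bound rather than an exact figure.
        log.warning('Total processing time of %s seconds is incomplete',
                    total_processing_time)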
10 changes: 8 additions & 2 deletions estuary/api/v1.py
@@ -124,9 +124,12 @@ def _get_partial_story(results, reverse=False):
rv['meta']['wait_times'] = wait_times
rv['meta']['total_wait_time'] = total_wait_time
rv['meta']['total_processing_time'] = None
rv['meta']['processing_time_flag'] = False
rv['meta']['total_lead_time'] = 0
try:
rv['meta']['total_processing_time'] = base_instance.get_total_processing_time([item])
total_processing_time, flag = base_instance.get_total_processing_time([item])
rv['meta']['total_processing_time'] = total_processing_time
rv['meta']['processing_time_flag'] = flag
except: # noqa E722
log.exception('Failed to compute total processing time.')
rv['data'][0]['resource_type'] = item.__label__
@@ -245,9 +248,12 @@ def _get_partial_stories(results, reverse=False):
rv['meta']['wait_times'] = wait_times
rv['meta']['total_wait_time'] = total_wait_time
rv['meta']['total_processing_time'] = None
rv['meta']['processing_time_flag'] = False
rv['meta']['total_lead_time'] = 0
try:
rv['meta']['total_processing_time'] = base_instance.get_total_processing_time([item])
total_processing_time, flag = base_instance.get_total_processing_time([item])
rv['meta']['total_processing_time'] = total_processing_time
rv['meta']['processing_time_flag'] = flag
except: # noqa E722
log.exception('Failed to compute total processing time.')
rv['data'][0]['resource_type'] = item.__label__
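
With this change, both story endpoints expose the flag next to the existing timing fields, defaulting to False so that the key is present even when the computation raises. A trimmed sketch of the resulting rv['meta'] block, with values copied from the style of the test expectations further down (only the keys are guaranteed by this commit):

    # Illustrative excerpt of rv['meta'] in an API response.
    {
        'wait_times': [34095461.0, 2678400.0, None],
        'total_wait_time': 36773861.0,
        'total_processing_time': 1140.0,
        'processing_time_flag': False,  # True when the total is known to be incomplete
        'total_lead_time': 29852059.0
    }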
15 changes: 11 additions & 4 deletions estuary/utils/story.py
@@ -225,9 +225,10 @@ def get_total_processing_time(self, results):
Get the total time spent processing the story.
:param list results: contains inflated results from Neo4j
:return: the seconds of total time spent processing
:rtype: int
:return: the seconds of total time spent processing with a flag for inaccurate calculations
:rtype: tuple
"""
flag = False
total = 0
# If there is a build in the story, it will be assigned here so that it can later be
# checked to see if it was attached to an advisory in the story
@@ -252,6 +253,7 @@
'without a creation time.',
artifact.__label__, id_num
)
flag = True
continue

if artifact.__label__.endswith('KojiBuild'):
@@ -281,6 +283,7 @@
'encountered without a completion time or subsequent build.',
artifact.__label__, id_num
)
flag = True
continue
else:
completion_time = datetime.utcnow()
@@ -304,7 +307,7 @@
else:
total += processing_time.total_seconds()

return total
return total, flag

def get_total_lead_time(self, results):
"""
@@ -431,9 +434,12 @@ def format_story_results(self, results, requested_item):
base_instance = BaseStoryManager()
wait_times, total_wait_time = base_instance.get_wait_times(results)
total_processing_time = 0
processing_time_flag = False
total_lead_time = 0
try:
total_processing_time = base_instance.get_total_processing_time(results)
processing_time, flag = base_instance.get_total_processing_time(results)
total_processing_time = processing_time
processing_time_flag = flag
except: # noqa E722
log.exception('Failed to compute total processing time statistic.')
try:
@@ -451,6 +457,7 @@
'wait_times': wait_times,
'total_wait_time': total_wait_time,
'total_processing_time': total_processing_time,
'processing_time_flag': processing_time_flag,
'total_lead_time': total_lead_time
}
}
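
For orientation, the flag is only set on the two skip paths shown above: an artifact encountered without a creation time, and one without a completion time or subsequent build. A heavily condensed sketch of that accounting, where creation_time and completion_time are hypothetical stand-ins for the real node properties, and the build/advisory pairing and utcnow fallback are omitted:

    def total_processing_time_sketch(results):
        # Hedged approximation of BaseStoryManager.get_total_processing_time above;
        # creation_time/completion_time are hypothetical attribute names.
        flag = False
        total = 0
        for artifact in results:
            if getattr(artifact, 'creation_time', None) is None:
                # Mirrors the 'without a creation time' warning above.
                flag = True
                continue
            completion = getattr(artifact, 'completion_time', None)
            if completion is None:
                # The real code falls back to datetime.utcnow() for in-progress
                # artifacts and only flags when no subsequent build exists either;
                # this sketch just flags and skips.
                flag = True
                continue
            total += (completion - artifact.creation_time).total_seconds()
        return total, flag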
7 changes: 7 additions & 0 deletions tests/api/test_all_stories.py
@@ -107,6 +107,7 @@
'story_related_nodes_forward': [0, 1, 0, 0],
'total_lead_time': 29852059.0,
'total_processing_time': 1140.0,
'processing_time_flag': False,
'total_wait_time': 36773861.0,
'wait_times': [34095461.0, 2678400.0, None],
'requested_node_index': 0,
@@ -234,6 +235,7 @@
'story_related_nodes_forward': [0, 1, 0, 0, 0, 0],
'total_lead_time': 43352201.0,
'total_processing_time': 358680.0,
'processing_time_flag': False,
'total_wait_time': 70277497.0,
'wait_times': [34095461.0, 2678400.0, 949260.0, 32554376.0, 338160.0],
'requested_node_index': 0,
@@ -368,6 +370,7 @@
'story_related_nodes_forward': [0, 1, 0, 0],
'total_lead_time': 29852059.0,
'total_processing_time': 1140.0,
'processing_time_flag': False,
'total_wait_time': 36773861.0,
'wait_times': [34095461.0, 2678400.0, None],
'story_type': 'container'
@@ -499,6 +502,7 @@
'story_related_nodes_forward': [0, 1, 0, 0],
'total_lead_time': 29945541.0,
'total_processing_time': 1140.0,
'processing_time_flag': False,
'total_wait_time': 36867343.0,
'wait_times': [29938275.0, 6929068.0, None],
'story_type': 'container'
@@ -659,6 +663,7 @@
'story_related_nodes_forward': [0, 1, 0, 0, 0, 0],
'total_lead_time': 43352201.0,
'total_processing_time': 358680.0,
'processing_time_flag': False,
'total_wait_time': 70277497.0,
'wait_times': [34095461.0, 2678400.0, 949260.0, 32554376.0, 338160.0],
'story_type': 'container'
@@ -819,6 +824,7 @@
'story_related_nodes_forward':[0, 1, 0, 0, 0, 0],
'total_lead_time': 43445683.0,
'total_processing_time': 358680.0,
'processing_time_flag': False,
'total_wait_time': 70370979.0,
'wait_times': [29938275.0, 6929068.0, 949260.0, 32554376.0, 338160.0],
'story_type': 'container'
@@ -1058,6 +1064,7 @@ def test_get_stories_fallback(client):
'wait_times': [68897.0],
'total_lead_time': 10440285.0,
'total_processing_time': 10371388.0,
'processing_time_flag': False,
'total_wait_time': 68897.0,
'story_type': 'container'
}
1 change: 1 addition & 0 deletions tests/api/test_auth.py
@@ -124,6 +124,7 @@ def test_get_story_auth(mock_oidc, employee_type):
'story_type': 'container',
'total_lead_time': 0.0,
'total_processing_time': 0.0,
'processing_time_flag': False,
'total_wait_time': 0.0,
'wait_times': [0]
}
12 changes: 12 additions & 0 deletions tests/api/test_story.py
@@ -159,6 +159,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -352,6 +353,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -511,6 +513,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -669,6 +672,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -846,6 +850,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -1012,6 +1017,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -1164,6 +1170,7 @@
'story_type': 'container',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 12427125.0,
'wait_times': [2045132.0, 34048.0, 8355885.0, 1036800.0, 50400.0, 955260.0]
}
@@ -1512,6 +1519,7 @@ def test_get_stories(client, resource, uids, expected):
'story_type': 'module',
'total_lead_time': 10440285.0,
'total_processing_time': 1610820.0,
'processing_time_flag': False,
'total_wait_time': 14511525.0,
'wait_times': [2045132.0, 34048.0, None, 10440285.0, 1036800.0, 50400.0, 955260.0]
}
@@ -1712,6 +1720,7 @@ def test_get_artifact_story_not_available(client):
'story_type': 'container',
'total_lead_time': 0,
'total_processing_time': 0,
'processing_time_flag': False,
'total_wait_time': 0,
'wait_times': [0]
}
@@ -1784,6 +1793,7 @@ def test_get_stories_just_artifact(client):
'story_type': 'container',
'total_lead_time': 0,
'total_processing_time': 0,
'processing_time_flag': False,
'total_wait_time': 0,
'wait_times': [0]
}
@@ -1920,6 +1930,7 @@ def test_get_story_partial_story(client):
'story_type': 'container',
'total_lead_time': 2642400.0,
'total_processing_time': 1605600.0,
'processing_time_flag': False,
'total_wait_time': 1036800.0,
'wait_times': [1036800.0, 50400.0]
}
@@ -2026,6 +2037,7 @@ def test_get_story_fallback(client):
'story_type': 'container',
'total_lead_time': 8361105.0,
'total_processing_time': 5220.0,
'processing_time_flag': False,
'total_wait_time': 8355885.0,
'wait_times': [8355885.0]
}
27 changes: 18 additions & 9 deletions tests/api/test_timeline.py
@@ -56,11 +56,12 @@ def test_full_timeline():

results = [bug, commit, build, advisory, event, c_build, c_advisory]
base_instance = estuary.utils.story.BaseStoryManager()
processing_time = base_instance.get_total_processing_time(results)
processing_time, flag = base_instance.get_total_processing_time(results)
lead_time = base_instance.get_total_lead_time(results)
wait_times, total_wait_time = base_instance.get_wait_times(results)

assert processing_time == 60.0
assert flag is False
assert lead_time == 110.0
assert wait_times == [10.0, 10.0, 10.0, 10.0, 20.0, 10.0]
assert total_wait_time == 50.0
@@ -97,11 +98,12 @@ def test_full_module_timeline():

results = [bug, commit, build, m_build, advisory]
base_instance = estuary.utils.story.BaseStoryManager()
processing_time = base_instance.get_total_processing_time(results)
processing_time, flag = base_instance.get_total_processing_time(results)
lead_time = base_instance.get_total_lead_time(results)
wait_times, total_wait_time = base_instance.get_wait_times(results)

assert processing_time == 30.0
assert flag is False
assert lead_time == 70.0
assert wait_times == [10.0, 10.0, 10.0, 10.0]
assert total_wait_time == 40.0
@@ -149,11 +151,12 @@ def test_individual_nodes(label, data, expected):
"""Test the data relating to the timeline with only one node."""
artifact = helpers.make_artifact(label, **data)
base_instance = estuary.utils.story.BaseStoryManager()
processing_time = base_instance.get_total_processing_time([artifact])
processing_time, flag = base_instance.get_total_processing_time([artifact])
lead_time = base_instance.get_total_lead_time([artifact])
wait_times, total_wait_time = base_instance.get_wait_times([artifact])

assert processing_time == expected[0]
assert flag is False
assert lead_time == expected[1]
assert wait_times == expected[2]
assert total_wait_time == expected[3]
@@ -194,11 +197,12 @@ def test_event_building():
with patch.object(estuary.utils.story, 'datetime', Mock(wraps=datetime)) as datetime_patch:
datetime_patch.utcnow.return_value = datetime(2019, 1, 1, 0, 1, 20, tzinfo=pytz.utc)
base_instance = estuary.utils.story.BaseStoryManager()
processing_time = base_instance.get_total_processing_time(results)
processing_time, flag = base_instance.get_total_processing_time(results)
lead_time = base_instance.get_total_lead_time(results)
wait_times, total_wait_time = base_instance.get_wait_times(results)

assert processing_time == 40.0
assert flag is False
assert lead_time == 80.0
assert wait_times == [10.0, 10.0, 10.0, 10.0]
assert total_wait_time == 40.0
@@ -244,11 +248,12 @@ def test_build_building():
with patch.object(estuary.utils.story, 'datetime', Mock(wraps=datetime)) as datetime_patch:
datetime_patch.utcnow.return_value = datetime(2019, 1, 1, 0, 1, 50, tzinfo=pytz.utc)
base_instance = estuary.utils.story.BaseStoryManager()
processing_time = base_instance.get_total_processing_time(results)
processing_time, flag = base_instance.get_total_processing_time(results)
lead_time = base_instance.get_total_lead_time(results)
wait_times, total_wait_time = base_instance.get_wait_times(results)

assert processing_time == 70.0
assert flag is False
assert lead_time == 110.0
assert wait_times == [10.0, 10.0, 10.0, 10.0, 20.0]
assert total_wait_time == 40.0
@@ -283,11 +288,12 @@ def test_advisory_in_qe():
with patch.object(estuary.utils.story, 'datetime', Mock(wraps=datetime)) as datetime_patch:
datetime_patch.utcnow.return_value = datetime(2019, 1, 1, 0, 1, 20, tzinfo=pytz.utc)
base_instance = estuary.utils.story.BaseStoryManager()
processing_time = base_instance.get_total_processing_time(results)
processing_time, flag = base_instance.get_total_processing_time(results)
lead_time = base_instance.get_total_lead_time(results)
wait_times, total_wait_time = base_instance.get_wait_times(results)

assert processing_time == 50.0
assert flag is False
assert lead_time == 80.0
assert wait_times == [10.0, 10.0, 10.0]
assert total_wait_time == 30.0
@@ -311,14 +317,15 @@ def test_stubbed_artifact(caplog):
results = [bug, commit, build]
base_instance = estuary.utils.story.BaseStoryManager()
base_instance.get_total_lead_time(results)
base_instance.get_total_processing_time(results)
total_processing_time, flag = base_instance.get_total_processing_time(results)

assert caplog.record_tuples == [
('estuary', logging.WARNING, 'While calculating the total lead time, a BugzillaBug with ID '
'1111 was encountered without a creation time.'),
('estuary', logging.WARNING, 'While calculating the total processing time, a KojiBuild with'
' ID 3333 was encountered without a creation time.')
]
assert flag is True


def test_nonlinear_data(caplog):
@@ -340,7 +347,7 @@
results = [bug, commit, build]
base_instance = estuary.utils.story.BaseStoryManager()
total_lead_time = base_instance.get_total_lead_time(results)
total_processing_time = base_instance.get_total_processing_time(results)
total_processing_time, flag = base_instance.get_total_processing_time(results)

assert caplog.record_tuples == [
('estuary', logging.WARNING, 'A negative total lead time was calculated, in a story '
@@ -350,6 +357,7 @@
]
assert total_lead_time == 0
assert total_processing_time == 0
assert flag is False


def test_event_no_time(caplog):
@@ -363,13 +371,14 @@

results = [event]
base_instance = estuary.utils.story.BaseStoryManager()
total_processing_time = base_instance.get_total_processing_time(results)
total_processing_time, flag = base_instance.get_total_processing_time(results)

assert caplog.record_tuples == [
('estuary', logging.WARNING, 'While calculating the total processing time, a Freshmaker'
'Event with ID 1111 was encountered without a completion time or subsequent build.')
]
assert total_processing_time == 0
assert flag is True


def test_no_attached_build(caplog):
