Skip to content

Commit

Permalink
pipelines 1.13 no longer have taskRuns in status, but only
Browse files Browse the repository at this point in the history
childReferences

Signed-off-by: Robert Cerven <rcerven@redhat.com>
  • Loading branch information
rcerven committed Mar 1, 2024
1 parent b0a61b1 commit 0e0e496
Show file tree
Hide file tree
Showing 3 changed files with 388 additions and 466 deletions.
100 changes: 50 additions & 50 deletions osbs/tekton.py
Expand Up @@ -66,25 +66,6 @@ def check_response_json(response, cmd):
return run_json


def get_sorted_task_runs(task_runs: Dict[str, Any]) -> List[Tuple[str, Dict[str, Any]]]:
    """Return task-run (name, data) pairs ordered by their status startTime.

    Entries whose status has no ``startTime`` key sort after all dated
    entries; relative order among them is preserved (stable sort).

    :param task_runs: mapping of task-run name to task-run data
    :return: list of (name, data) tuples in chronological order
    """
    def sort_key(item):
        # Key is (missing-flag, timestamp): dated runs compare as
        # (False, ts) and therefore precede undated (True, None) runs.
        status = item[1]["status"]
        if "startTime" in status:
            started = datetime.strptime(
                status["startTime"], "%Y-%m-%dT%H:%M:%SZ"
            ).timestamp()
            return (False, started)
        return (True, None)

    return sorted(task_runs.items(), key=sort_key)


class Openshift(object):
def __init__(self, openshift_api_url, openshift_oauth_url,
k8s_api_url=None,
Expand Down Expand Up @@ -493,22 +474,16 @@ def get_task_results(self):
if not data:
return task_results

task_runs_status = data['status'].get('taskRuns', {})
for task_run in self.child_references:
task_info = TaskRun(os=self.os,task_run_name=task_run['name']).get_info()

for _, stats in get_sorted_task_runs(task_runs_status):
if not ('status' in stats and 'conditions' in stats['status']):
continue

if stats['status']['conditions'][0]['reason'] != 'Succeeded':
continue
task_name = task_info['metadata']['labels']['tekton.dev/pipelineTask']
results = {}

if 'taskResults' not in stats['status']:
if 'taskResults' not in task_info['status']:
continue

task_name = stats['pipelineTaskName']
results = {}

for result in stats['status']['taskResults']:
for result in task_info['status']['taskResults']:
results[result['name']] = result['value']

task_results[task_name] = results
Expand All @@ -521,9 +496,6 @@ def get_error_message(self):
if not data:
return "pipeline run removed;"

task_runs_status = data['status'].get('taskRuns', {})
sorted_tasks = get_sorted_task_runs(task_runs_status)

plugin_errors = None
annotations_str = None
task_results = self.get_task_results()
Expand All @@ -550,14 +522,16 @@ def get_error_message(self):

pipeline_error = data['status']['conditions'][0].get('message')

for _, stats in sorted_tasks:
task_name = stats['pipelineTaskName']
for task_run in self.child_references:
task_info = TaskRun(os=self.os,task_run_name=task_run['name']).get_info()

task_name = task_info['metadata']['labels']['tekton.dev/pipelineTask']
got_task_error = False
if stats['status']['conditions'][0]['reason'] == 'Succeeded':
if task_info['status']['conditions'][0]['reason'] == 'Succeeded':
continue

if 'steps' in stats['status']:
for step in stats['status']['steps']:
if 'steps' in task_info['status']:
for step in task_info['status']['steps']:
if 'terminated' in step:
exit_code = step['terminated']['exitCode']
if exit_code == 0:
Expand All @@ -578,7 +552,7 @@ def get_error_message(self):

if not got_task_error:
err_message += f"Error in {task_name}: " \
f"{stats['status']['conditions'][0]['message']};\n"
f"{task_info['status']['conditions'][0]['message']};\n"

if not err_message:
if pipeline_error:
Expand Down Expand Up @@ -651,9 +625,11 @@ def _any_task_run_in_state(

def matches_state(task_run: Dict[str, Any]) -> bool:
task_run_status = task_run['status']
task_name = task_run['metadata']['labels']['tekton.dev/pipelineTask']

if 'conditions' not in task_run_status:
logger.debug('conditions are missing from status in task %s : %s',
task_run['pipelineTaskName'], task_run_status)
task_name, task_run_status)
return False

status = task_run_status['conditions'][0]['status']
Expand All @@ -663,13 +639,17 @@ def matches_state(task_run: Dict[str, Any]) -> bool:
if match_state(status, reason, completion_time is not None):
logger.debug(
'Found %s task: name=%s; status=%s; reason=%s; completionTime=%s',
state_name, task_run['pipelineTaskName'], status, reason, completion_time,
state_name, task_name, status, reason, completion_time,
)
return True

return False

task_runs = self.data['status'].get('taskRuns', {}).values()
task_runs = []
for task_run in self.child_references:
task_info = TaskRun(os=self.os,task_run_name=task_run['name']).get_info()
task_runs.append(task_info)

return any(matches_state(tr) for tr in task_runs)

def wait_for_finish(self):
Expand Down Expand Up @@ -702,6 +682,17 @@ def status_status(self):
return None
return data['status']['conditions'][0]['status']

@property
def child_references(self):
    """TaskRun child references from the pipeline run status.

    :return: list of childReferences entries whose kind is 'TaskRun';
             empty list when the run has no data or no childReferences
    """
    run_data = self.data
    if not run_data:
        return []

    refs = run_data['status'].get('childReferences', [])
    taskrun_refs = []
    for ref in refs:
        if ref['kind'] == 'TaskRun':
            taskrun_refs.append(ref)
    return taskrun_refs

@property
def pipeline_results(self) -> Dict[str, any]:
"""
Expand Down Expand Up @@ -785,17 +776,24 @@ def wait_for_taskruns(self):
return []

try:
task_runs = pipeline_run['status']['taskRuns']
child_references = self.data['status']['childReferences']
except KeyError:
logger.debug(
"Pipeline run '%s' does not have any task runs yet",
self.pipeline_run_name)
continue
current_task_runs = []
for task_run_name, task_run_data in task_runs.items():

for task_run in child_references:
if task_run['kind'] != 'TaskRun':
continue
task_run_name = task_run['name']
task_info = TaskRun(os=self.os,task_run_name=task_run_name).get_info()
task_name = task_info['metadata']['labels']['tekton.dev/pipelineTask']

if task_run_name not in watched_task_runs:
watched_task_runs.add(task_run_name)
current_task_runs.append((task_run_data['pipelineTaskName'], task_run_name))
current_task_runs.append((task_name, task_run_name))

yield current_task_runs

Expand All @@ -815,12 +813,14 @@ def _get_logs(self):
if not pipeline_run:
return None

task_runs = pipeline_run['status']['taskRuns']
for task_run in self.child_references:

task_run_object = TaskRun(os=self.os, task_run_name=task_run['name'])
task_info = task_run_object.get_info()
pipeline_task_name = task_info['metadata']['labels']['tekton.dev/pipelineTask']

for task_run_name, task_run_data in get_sorted_task_runs(task_runs):
pipeline_task_name = task_run_data['pipelineTaskName']
logs[pipeline_task_name] = task_run_object.get_logs()

logs[pipeline_task_name] = TaskRun(os=self.os, task_run_name=task_run_name).get_logs()
return logs

def _get_logs_stream(self):
Expand Down
65 changes: 38 additions & 27 deletions tests/test_api.py
Expand Up @@ -32,7 +32,7 @@
TEST_TARGET, TEST_USER, TEST_KOJI_TASK_ID, TEST_VERSION,
TEST_PIPELINE_RUN_TEMPLATE, TEST_PIPELINE_REPLACEMENTS_TEMPLATE,
TEST_OCP_NAMESPACE)
from osbs.tekton import PipelineRun
from osbs.tekton import PipelineRun, TaskRun


REQUIRED_BUILD_ARGS = {
Expand Down Expand Up @@ -1033,23 +1033,31 @@ def test_get_build_error_message(self, osbs_binary):
message = [{'key': 'task_result', 'value': 'bad thing'}]
steps = [{'name': 'step1', 'terminated': {'exitCode': 0}},
{'name': 'step2', 'terminated': {'exitCode': 128, 'message': json.dumps(message)}}]
taskruns = {'task1': {'status': {'conditions': [{'reason': 'Succeeded'}],
'startTime': '2022-04-26T15:58:42Z'},
'pipelineTaskName': 'prun-task1'},
'task2': {'status': {'conditions': [{'reason': 'Failed'}],
'steps': steps,
'startTime': '2022-04-26T15:58:42Z'},
'pipelineTaskName': 'prun-task2'},
'task3': {'status': {'conditions': [{'reason': 'Succeeded'}],
'startTime': '2022-04-26T16:58:42Z',
'taskResults': [{'name': 'annotations',
'value': metadata}]},
'pipelineTaskName': 'binary-container-exit'}}

resp = {'metadata': {'name': 'run_name'},
'status': {'taskRuns': taskruns, 'conditions': [{'message': 'error'}]}}

flexmock(PipelineRun).should_receive('get_info').and_return(resp)
taskstat1 = {'conditions': [{'reason': 'Succeeded'}],
'startTime': '2022-04-26T15:58:42Z'}
taskstat2 = {'conditions': [{'reason': 'Failed'}], 'steps': steps,
'startTime': '2022-04-26T15:58:42Z'}
taskstat3 = {'conditions': [{'reason': 'Succeeded'}],
'startTime': '2022-04-26T16:58:42Z',
'taskResults': [{'name': 'annotations', 'value': metadata}]}
childrefs = [{'name': 'task_run_name1', 'kind': 'TaskRun'},
{'name': 'task_run_name2', 'kind': 'TaskRun'},
{'name': 'task_run_name3', 'kind': 'TaskRun'}]

resp1 = {'metadata': {'name': 'run_name'},
'status': {'childReferences': childrefs, 'conditions': [{'message': 'error'}]}}
resp2 = {'metadata': {'labels': {'tekton.dev/pipelineTask': 'prun-task1'}},
'status': taskstat1}
resp3 = {'metadata': {'labels': {'tekton.dev/pipelineTask': 'prun-task2'}},
'status': taskstat2}
resp4 = {'metadata': {'labels': {'tekton.dev/pipelineTask': 'binary-container-exit'}},
'status': taskstat3}

flexmock(PipelineRun).should_receive('get_info').and_return(resp1)
(flexmock(TaskRun).should_receive('get_info')
.and_return(resp2)
.and_return(resp3)
.and_return(resp4))

error_msg = "Error in plugin plugin1: error1;\n"
error_msg += "Error in prun-task2: bad thing;\n"
Expand All @@ -1059,15 +1067,18 @@ def test_get_build_error_message(self, osbs_binary):
'{"platforms": ["x86_64", "ppc64le"]}',
])
def test_get_final_platforms(self, osbs_binary, platforms_result):
taskruns = {'task1': {'status': {'conditions': [{'reason': 'Succeeded'}],
'taskResults': [{'name': 'platforms_result',
'value': platforms_result}],
'startTime': '2022-04-26T15:58:42Z'},
'pipelineTaskName': 'binary-container-prebuild'}}

resp = {'metadata': {'name': 'run_name'}, 'status': {'taskRuns': taskruns}}

flexmock(PipelineRun).should_receive('get_info').and_return(resp)
taskstatus = {'conditions': [{'reason': 'Succeeded'}],
'taskResults': [{'name': 'platforms_result',
'value': platforms_result}],
'startTime': '2022-04-26T15:58:42Z'}
childrefs = [{'name': 'task_run_name', 'kind': 'TaskRun'}]

resp1 = {'metadata': {'name': 'run_name'}, 'status': {'childReferences': childrefs}}
resp2 = {'metadata': {'labels': {'tekton.dev/pipelineTask': 'binary-container-prebuild'}},
'status': taskstatus}

flexmock(PipelineRun).should_receive('get_info').and_return(resp1)
flexmock(TaskRun).should_receive('get_info').and_return(resp2)
assert osbs_binary.get_final_platforms('run_name') == ["x86_64", "ppc64le"]

def test_get_build_results(self, osbs_binary):
Expand Down

0 comments on commit 0e0e496

Please sign in to comment.