Skip to content
This repository has been archived by the owner on May 22, 2024. It is now read-only.

Commit

Permalink
Feature 737/merge request job and start job into client webhook (#181)
Browse files Browse the repository at this point in the history
* ClientConverterCallback - setting final job conversion state

* ClientConverterCallback - setting final job conversion state

* tweak

* TestClientLinterCallback - added validation that build log contains final status and success

* TestClientLinterCallback - added validation that build log contains final status and success

* TestClientLinterCallback - removed code no longer used.

* changed converter output to go to convert_log.json so it doesn't kick off deployer before linter is finished.

* Updated unit tests.

* ClientLinterCallback - fixed logic for multiple.

* ClientLinterCallback - fixed logic for multiple.
  • Loading branch information
PhotoNomad0 authored and richmahn committed Oct 12, 2017
1 parent 2f219a2 commit 6bba957
Show file tree
Hide file tree
Showing 5 changed files with 100 additions and 164 deletions.
155 changes: 30 additions & 125 deletions libraries/client/client_converter_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import json
import os
import tempfile
from datetime import datetime
from libraries.app.app import App
from libraries.client.client_linter_callback import ClientLinterCallback
from libraries.general_tools.file_utils import unzip, write_file, remove_tree, remove
Expand Down Expand Up @@ -55,6 +56,7 @@ def process_callback(self):
part_id = None
multiple_project = False

self.job.ended_at = datetime.utcnow()
self.job.success = self.success
for message in self.log:
self.job.log_message(message)
Expand All @@ -69,6 +71,24 @@ def process_callback(self):
else:
self.job.log_message('{0} function returned successfully.'.format(self.job.convert_module))

if not self.success or len(self.job.errors):
self.job.success = False
self.job.status = "failed"
message = "Conversion failed"
App.logger.debug("Conversion failed, success: {0}, errors: {1}".format(self.success, self.job.errors))
elif len(self.job.warnings) > 0:
self.job.success = True
self.job.status = "warnings"
message = "Conversion successful with warnings"
else:
self.job.success = True
self.job.status = "success"
message = "Conversion successful"

self.job.message = message
self.job.log_message(message)
self.job.log_message('Finished job {0} at {1}'.format(self.job.job_id, self.job.ended_at.strftime("%Y-%m-%dT%H:%M:%SZ")))

s3_commit_key = 'u/{0}/{1}/{2}'.format(self.job.user_name, self.job.repo_name, self.job.commit_id)
upload_key = s3_commit_key
if multiple_project:
Expand Down Expand Up @@ -104,15 +124,15 @@ def process_callback(self):
self.upload_converted_files(upload_key, unzip_dir)

if multiple_project:
# Now download the existing build_log.json file, update it and upload it back to S3
build_log_json = self.update_build_log(s3_commit_key, part_id + "/")
# Now download the existing build_log.json file, update it and upload it back to S3 as convert_log
build_log_json = self.update_convert_log(s3_commit_key, part_id + "/")

# mark current part as finished
self.cdn_upload_contents({}, s3_commit_key + '/' + part_id + '/finished')

else: # single part conversion
# Now download the existing build_log.json file, update it and upload it back to S3
build_log_json = self.update_build_log(s3_commit_key)
# Now download the existing build_log.json file, update it and upload it back to S3 as convert_log
build_log_json = self.update_convert_log(s3_commit_key)

self.cdn_upload_contents({}, s3_commit_key + '/finished') # flag finished

Expand All @@ -124,90 +144,6 @@ def process_callback(self):
remove_tree(self.temp_dir) # cleanup
return build_log_json

def merge_build_logs(self, s3_commit_key, count, prefix=''):
    """Merge the per-part build_log.json files for a multi-part job into one master log.

    Downloads the master build_log.json under *s3_commit_key*, then each part's
    log (under keys '0/', '1/', ... up to *count*), prefixes every log/error/warning
    message with that part's book name, accumulates everything on self.job, and
    uploads the merged log back to S3.

    :param s3_commit_key: base S3 key for this commit's artifacts
    :param count: number of parts to merge
    :param prefix: optional key prefix passed through to upload_build_log
    :return: the merged build_log JSON dict returned by upload_build_log
    """
    master_build_log_json = self.get_build_log(s3_commit_key)
    App.logger.debug('Initial build_log.json: ' + json.dumps(master_build_log_json))
    build_logs_json = []
    # start optimistic; downgraded below if any part reports errors/warnings
    self.job.status = 'success'
    self.job.log = self.get_list_from_dict(master_build_log_json, 'log')
    self.job.warnings = self.get_list_from_dict(master_build_log_json, 'warnings')
    self.job.errors = self.get_list_from_dict(master_build_log_json, 'errors')
    for i in range(0, count):
        # App.logger.debug('Merging part {0}'.format(i))

        # Now download the existing build_log.json file
        part = str(i) + "/"
        build_log_json = self.get_build_log(s3_commit_key, part)

        self.build_log_sanity_check(build_log_json)

        build_logs_json.append(build_log_json)

        # pick a display name to prefix this part's messages with:
        # prefer book, fall back to commit_id, else a generated name
        if 'book' in build_log_json:
            book = build_log_json['book']
        elif 'commit_id' in build_log_json:
            book = build_log_json['commit_id']  # if no book then use commit_id
        else:
            book = 'part_' + str(i)  # generate dummy name

        # merge build_log data
        for message in self.prefix_list(build_log_json, 'log', book):
            self.job.log_message(message)
        for message in self.prefix_list(build_log_json, 'errors', book):
            self.job.error_message(message)
        for message in self.prefix_list(build_log_json, 'warnings', book):
            self.job.warning_message(message)
        # last part with a non-success status/non-None success/message wins
        if ('status' in build_log_json) and (build_log_json['status'] != 'success'):
            self.job.status = build_log_json['status']
        if ('success' in build_log_json) and (build_log_json['success'] is not None):
            self.job.success = build_log_json['success']
        if ('message' in build_log_json) and (build_log_json['message'] is not None):
            self.job.message = build_log_json['message']

    # set overall status
    if len(self.job.errors):
        self.job.status = 'errors'
    elif len(self.job.warnings):
        self.job.status = 'warnings'

    # Now upload the merged build_log.json file, update it and upload it back to S3
    master_build_log_json['build_logs'] = build_logs_json  # add record of all the parts
    # copy identifying fields from the first part into the master log
    build_logs_json0 = build_logs_json[0]
    master_build_log_json['commit_id'] = build_logs_json0['commit_id']
    master_build_log_json['created_at'] = build_logs_json0['created_at']
    master_build_log_json['started_at'] = build_logs_json0['started_at']
    master_build_log_json['repo_owner'] = build_logs_json0['repo_owner']
    master_build_log_json['repo_name'] = build_logs_json0['repo_name']
    master_build_log_json['resource_type'] = build_logs_json0['resource_type']
    build_log_json = self.upload_build_log(master_build_log_json, s3_commit_key, prefix)
    return build_log_json

@staticmethod
def get_list_from_dict(dictionary, key):
return dictionary[key] if key in dictionary else []

@staticmethod
def prefix_list(build_log_json, key, book):
if key not in build_log_json:
return []

items = build_log_json[key]
for i in range(0, len(items)):
item = items[i]
new_text = book + ': ' + item
items[i] = new_text
return items

@staticmethod
def build_log_sanity_check(build_log_json):
# sanity check
if 'log' not in build_log_json:
build_log_json['log'] = []
if 'warnings' not in build_log_json:
build_log_json['warnings'] = []
if 'errors' not in build_log_json:
build_log_json['errors'] = []

def unzip_converted_files(self, converted_zip_file):
unzip_dir = tempfile.mkdtemp(prefix='unzip_', dir=self.temp_dir)
try:
Expand All @@ -227,43 +163,12 @@ def upload_converted_files(s3_commit_key, unzip_dir):
App.logger.debug('Uploading {0} to {1}'.format(f, key))
App.cdn_s3_handler().upload_file(path, key, cache_time=0)

def update_project_file(self):
    """Update the repo-level project.json on S3 with this job's commit record.

    Downloads the existing project.json for the repo, refreshes the user/repo
    fields, replaces any previous entry for this commit_id with the job's
    current status, writes the file locally and uploads it back to the CDN.

    :return: the updated project.json dict
    """
    project_json_key = 'u/{0}/{1}/project.json'.format(self.job.user_name, self.job.repo_name)
    project_json = App.cdn_s3_handler().get_json(project_json_key)
    project_json['user'] = self.job.user_name
    project_json['repo'] = self.job.repo_name
    project_json['repo_url'] = 'https://{0}/{1}/{2}'.format(App.gogs_url, self.job.user_name, self.job.repo_name)
    commit = {
        'id': self.job.commit_id,
        'created_at': self.job.created_at.strftime("%Y-%m-%dT%H:%M:%SZ"),
        'status': self.job.status,
        'success': self.job.success,
        'started_at': None,
        'ended_at': None
    }
    # timestamps stay None unless the job actually recorded them
    if self.job.started_at:
        commit['started_at'] = self.job.started_at.strftime("%Y-%m-%dT%H:%M:%SZ")
    if self.job.ended_at:
        commit['ended_at'] = self.job.ended_at.strftime("%Y-%m-%dT%H:%M:%SZ")
    if 'commits' not in project_json:
        project_json['commits'] = []
    # drop any earlier record of this commit, then append the current one
    commits = []
    for c in project_json['commits']:
        if c['id'] != self.job.commit_id:
            commits.append(c)
    commits.append(commit)
    project_json['commits'] = commits
    project_file = os.path.join(self.temp_dir, 'project.json')
    write_file(project_file, project_json)
    App.cdn_s3_handler().upload_file(project_file, project_json_key, cache_time=0)
    return project_json

def update_build_log(self, s3_base_key, part=''):
def update_convert_log(self, s3_base_key, part=''):
build_log_json = self.get_build_log(s3_base_key, part)
self.upload_build_log(build_log_json, s3_base_key, part)
self.upload_convert_log(build_log_json, s3_base_key, part)
return build_log_json

def upload_build_log(self, build_log_json, s3_base_key, part=''):
def upload_convert_log(self, build_log_json, s3_base_key, part=''):
if self.job.started_at:
build_log_json['started_at'] = self.job.started_at.strftime("%Y-%m-%dT%H:%M:%SZ")
else:
Expand All @@ -287,7 +192,7 @@ def upload_build_log(self, build_log_json, s3_base_key, part=''):
build_log_json['errors'] = self.job.errors
else:
build_log_json['errors'] = []
build_log_key = self.get_build_log_key(s3_base_key, part)
build_log_key = self.get_build_log_key(s3_base_key, part, name='convert_log.json')
App.logger.debug('Writing build log to ' + build_log_key)
# App.logger.debug('build_log contents: ' + json.dumps(build_log_json))
self.cdn_upload_contents(build_log_json, build_log_key)
Expand All @@ -307,6 +212,6 @@ def get_build_log(self, s3_base_key, part=''):
return build_log_json

@staticmethod
def get_build_log_key(s3_base_key, part=''):
upload_key = '{0}/{1}build_log.json'.format(s3_base_key, part)
def get_build_log_key(s3_base_key, part='', name='build_log.json'):
upload_key = '{0}/{1}{2}'.format(s3_base_key, part, name)
return upload_key
12 changes: 7 additions & 5 deletions libraries/client/client_linter_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@ def process_callback(self):
build_log['warnings'].append(msg)
App.logger.error(msg)
else:
App.logger.debug("Linter {0} {1} warnings:\n{1}".format(self.identifier, len(self.warnings), '\n'.join(self.warnings[:5])))
App.logger.debug("Linter {0} {1} warnings:\n{1}".format(self.identifier, len(self.warnings),
'\n'.join(self.warnings[:5])))

has_warnings = len(build_log['warnings']) > 0
if has_warnings:
Expand Down Expand Up @@ -126,7 +127,7 @@ def deploy_if_conversion_finished(s3_results_key, identifier):
multiple_project = len(id_parts) > 3

if not multiple_project:
App.logger.debug('Single job: checking if convert and lint have complete.')
App.logger.debug('Single job: checking if convert and lint have completed.')
build_log = ClientLinterCallback.merge_build_status_for_part(build_log, s3_results_key, output_dir)
else:
App.logger.debug('Multiple parts: Checking if all parts completed.')
Expand All @@ -145,7 +146,8 @@ def deploy_if_conversion_finished(s3_results_key, identifier):
elif len(build_log['warnings']):
build_log['status'] = 'warnings'
build_log['ended_at'] = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
build_log['multiple'] = True
if multiple_project:
build_log['multiple'] = True

ClientLinterCallback.upload_build_log(build_log, "build_log.json", output_dir, s3_results_key)
ClientLinterCallback.update_project_file(build_log, output_dir)
Expand Down Expand Up @@ -206,7 +208,7 @@ def merge_build_status_for_part(build_log, s3_results_key, output_dir):
App.logger.debug('Convert not finished for {0}'.format(s3_results_key))
return None

part_build_log = ClientLinterCallback.get_results(s3_results_key, "build_log.json")
part_build_log = ClientLinterCallback.get_results(s3_results_key, "convert_log.json")
if part_build_log:
part_build_log_combined = ClientLinterCallback.merge_build_status_for_file(part_build_log,
s3_results_key,
Expand All @@ -220,7 +222,7 @@ def merge_build_status_for_part(build_log, s3_results_key, output_dir):
else:
App.logger.debug('Lint_log.json not found for {0}'.format(s3_results_key))
else:
App.logger.debug('Build_log.json not found for {0}'.format(s3_results_key))
App.logger.debug('convert_log.json not found for {0}'.format(s3_results_key))

return None

Expand Down
6 changes: 3 additions & 3 deletions libraries/door43_tools/project_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def deploy_revision_to_door43(self, build_log_key):
return False

start = time.time()
App.logger.debug("Deploying, build log: " + json.dumps(build_log))
App.logger.debug("Deploying, build log: " + json.dumps(build_log)[:256])

user = build_log['repo_owner']
repo_name = build_log['repo_name']
Expand Down Expand Up @@ -134,11 +134,11 @@ def deploy_revision_to_door43(self, build_log_key):
# update index of templated files
index_json_fname = 'index.json'
index_json = self.get_templater_index(s3_commit_key, index_json_fname)
App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:120])
App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:256])
self.update_index_key(index_json, templater, 'titles')
self.update_index_key(index_json, templater, 'chapters')
self.update_index_key(index_json, templater, 'book_codes')
App.logger.debug("final 'index.json': " + json.dumps(index_json)[:120])
App.logger.debug("final 'index.json': " + json.dumps(index_json)[:256])
out_file = os.path.join(output_dir, index_json_fname)
write_file(out_file, index_json)
App.cdn_s3_handler().upload_file(out_file, s3_commit_key + '/' + index_json_fname)
Expand Down
2 changes: 1 addition & 1 deletion tests/client_tests/test_client_converter_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@ def test_client_converter_callback_multiple_job_complete_error(self, mock_downlo
self.validate_results(expect_error, results)

@patch('libraries.client.client_converter_callback.download_file')
def test_client_converter_callback_multiple_noo_jobs_complete(self, mock_download_file):
def test_client_converter_callback_multiple_not_jobs_complete(self, mock_download_file):
# given
self.source_zip = os.path.join(self.resources_dir, "raw_sources/en-ulb.zip")
identifier = 'job1/2/0/01-GEN.usfm'
Expand Down
Loading

0 comments on commit 6bba957

Please sign in to comment.