Skip to content
This repository has been archived by the owner on May 22, 2024. It is now read-only.

Commit

Permalink
Feature 737/merge request job and start job into client webhook (#187)
Browse files Browse the repository at this point in the history
* TqPreprocessor - fix so each part is processed as it comes in, even if out of order.  Delayed creating main build log json, so it will be processed last

* TqPreprocessor - fix so each part is processed as it comes in, even if out of order.  Delayed creating main build log json, so it will be processed last

* ClientLinterCallback - fix so each part is processed as it comes in, even if out of order.  Delayed creating main build log json, so it will be processed last

* timing - log elapsed seconds for the deploy download and overall deploy phases

* ClientLinterCallback - cleaned up the job-completion logic; moved the kick-off of the multi-part merge into the deploy step.
ProjectDeployer - handles checking for when to do the multipart merge; added checks to guard against race conditions where parts are not yet deployed before the merge.

* ClientLinterCallback - cleanup of job complete logic.  Move kick off of multi-part merge to deploy
ProjectDeployer - Handles checking for when to do multipart merge.  adding some checking in case race conditions happen for parts not deployed before merge.

* ClientLinterCallback - cleanup of job complete logic.  Move kick off of multi-part merge to deploy
ProjectDeployer - Handles checking for when to do multipart merge.  adding some checking in case race conditions happen for parts not deployed before merge.

* ClientLinterCallback - cleanup of job complete logic.  Move kick off of multi-part merge to deploy
ProjectDeployer - Handles checking for when to do multipart merge.  adding some checking in case race conditions happen for parts not deployed before merge.

* ProjectDeployer - fix deploy flag.

* ProjectDeployer - fix deploy flag.

* ProjectDeployer - fix deploy flag.

* ProjectDeployer - fix deploy flag.

* ProjectDeployer - fix deploy flag.

* fix unit tests.
  • Loading branch information
PhotoNomad0 authored and richmahn committed Oct 13, 2017
1 parent 318f506 commit dcc0d4d
Show file tree
Hide file tree
Showing 4 changed files with 165 additions and 101 deletions.
27 changes: 19 additions & 8 deletions libraries/client/client_linter_callback.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import print_function, unicode_literals
import os
import tempfile
import time
from datetime import datetime
from libraries.app.app import App
from libraries.general_tools import file_utils
Expand Down Expand Up @@ -103,12 +104,12 @@ def process_callback(self):
return build_log

@staticmethod
def upload_build_log(build_log, file_name, output_dir, s3_results_key, cache_time=0):
    """
    Serialize build_log to output_dir/file_name and upload it to the CDN bucket.

    Reconstructed from diff residue: the source span interleaved the pre- and
    post-change signature/upload lines; this is the post-change method.

    :param dict build_log: build status/log data to write out as JSON
    :param string file_name: name of the JSON file (e.g. 'build_log.json')
    :param string output_dir: local working directory for the temp file
    :param string s3_results_key: S3 key prefix for this conversion's results
    :param int cache_time: cache lifetime (seconds) for the uploaded object;
        0 (default) disables caching so status polling sees fresh data
    """
    build_log_file = os.path.join(output_dir, file_name)
    write_file(build_log_file, build_log)
    upload_key = '{0}/{1}'.format(s3_results_key, file_name)
    App.logger.debug('Saving build log to ' + upload_key)
    App.cdn_s3_handler().upload_file(build_log_file, upload_key, cache_time=cache_time)

@staticmethod
def deploy_if_conversion_finished(s3_results_key, identifier):
Expand All @@ -125,6 +126,7 @@ def deploy_if_conversion_finished(s3_results_key, identifier):
build_log = None
id_parts = identifier.split('/')
multiple_project = len(id_parts) > 3
all_parts_completed = True

if not multiple_project:
App.logger.debug('Single job: checking if convert and lint have completed.')
Expand All @@ -137,9 +139,9 @@ def deploy_if_conversion_finished(s3_results_key, identifier):
build_log = ClientLinterCallback.merge_build_status_for_part(build_log, part_key, output_dir)
if build_log is None:
App.logger.debug('Part {0} not complete'.format(part_key))
break
all_parts_completed = False

if build_log is not None: # if all parts found, save build log and kick off deploy
if all_parts_completed and (build_log is not None): # if all parts found, save build log and kick off deploy
# set overall status
if len(build_log['errors']):
build_log['status'] = 'errors'
Expand All @@ -149,11 +151,12 @@ def deploy_if_conversion_finished(s3_results_key, identifier):
if multiple_project:
build_log['multiple'] = True

ClientLinterCallback.upload_build_log(build_log, "build_log.json", output_dir, s3_results_key)
ClientLinterCallback.upload_build_log(build_log, "final_build_log.json", output_dir, s3_results_key)
ClientLinterCallback.update_project_file(build_log, output_dir)
App.logger.debug('All parts completed, deploying')
App.logger.debug('All parts completed')
else:
App.logger.debug('Not all parts completed')
build_log = None

file_utils.remove_tree(output_dir)
return build_log
Expand Down Expand Up @@ -191,8 +194,8 @@ def update_jobs_table(s3_results_key, build_log, output_dir):

# flag this part as done
ClientLinterCallback.upload_build_log(build_log, 'merged.json', output_dir, s3_results_key)
# trigger deployer to start templating this part of the conversion
ClientLinterCallback.upload_build_log(build_log, 'build_log.json', output_dir, s3_results_key)
# update build_log to start deploy of this part
ClientLinterCallback.upload_build_log(build_log, 'build_log.json', output_dir, s3_results_key, cache_time=600)
return

@staticmethod
Expand All @@ -217,6 +220,14 @@ def merge_build_status_for_part(build_log, s3_results_key, output_dir):
s3_results_key,
"lint_log.json",
linter_file=True)
if not part_build_log_combined:
App.logger.debug('Lint_log.json not found yet for {0}, wait and retry'.format(s3_results_key))
time.sleep(2)
part_build_log_combined = ClientLinterCallback.merge_build_status_for_file(part_build_log,
s3_results_key,
"lint_log.json",
linter_file=True)

if part_build_log_combined:
build_log = ClientLinterCallback.merge_results_logs(build_log, part_build_log_combined,
linter_file=False)
Expand Down
218 changes: 130 additions & 88 deletions libraries/door43_tools/project_deployer.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import time
from glob import glob
from shutil import copyfile

from libraries.general_tools import file_utils
from libraries.general_tools.file_utils import write_file, remove_tree
from libraries.door43_tools.templaters import init_template
Expand Down Expand Up @@ -86,94 +87,17 @@ def deploy_revision_to_door43(self, build_log_key):
App.door43_s3_handler().download_file(template_key, template_file)

if not multi_merge:
App.cdn_s3_handler().download_dir(download_key + '/', source_dir)
source_dir = os.path.join(source_dir, download_key)

elapsed_seconds = int(time.time() - start)
App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")

html_files = sorted(glob(os.path.join(source_dir, '*.html')))
if len(html_files) < 1:
content = ''
if len(build_log['errors']) > 0:
content += """
<div style="text-align:center;margin-bottom:20px">
<i class="fa fa-times-circle-o" style="font-size: 250px;font-weight: 300;color: red"></i>
<br/>
<h2>Critical!</h2>
<h3>Here is what went wrong with this build:</h3>
</div>
"""
content += '<div><ul><li>' + '</li><li>'.join(build_log['errors']) + '</li></ul></div>'
else:
content += '<h1 class="conversion-requested">{0}</h1>'.format(build_log['message'])
content += '<p><i>No content is available to show for {0} yet.</i></p>'.format(repo_name)
html = """
<html lang="en">
<head>
<title>{0}</title>
</head>
<body>
<div id="content">{1}</div>
</body>
</html>""".format(repo_name, content)
repo_index_file = os.path.join(source_dir, 'index.html')
write_file(repo_index_file, html)

# merge the source files with the template
templater = init_template(resource_type, source_dir, output_dir, template_file)

try:
self.run_templater(templater)
except Exception as e:
App.logger.error("Error applying template {0} to resource type {1}".format(template_file,
resource_type))
self.close()
return False

# update index of templated files
index_json_fname = 'index.json'
index_json = self.get_templater_index(s3_commit_key, index_json_fname)
App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:256])
self.update_index_key(index_json, templater, 'titles')
self.update_index_key(index_json, templater, 'chapters')
self.update_index_key(index_json, templater, 'book_codes')
App.logger.debug("final 'index.json': " + json.dumps(index_json)[:256])
out_file = os.path.join(output_dir, index_json_fname)
write_file(out_file, index_json)
App.cdn_s3_handler().upload_file(out_file, s3_commit_key + '/' + index_json_fname)
source_dir, success = self.deploy_single_conversion(build_log, download_key, output_dir, repo_name,
resource_type, s3_commit_key, source_dir, start,
template_file)
if not success:
return

else:
# merge multi-part project
App.door43_s3_handler().download_dir(download_key + '/', source_dir) # get previous templated files
source_dir = os.path.join(source_dir, download_key)
files = sorted(glob(os.path.join(source_dir, '*.*')))
for f in files:
App.logger.debug("Downloaded: " + f)

fname = os.path.join(source_dir, 'index.html')
if os.path.isfile(fname):
os.remove(fname) # remove index if already exists

elapsed_seconds = int(time.time() - start)
App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")

templater = init_template(resource_type, source_dir, output_dir, template_file)

# restore index from previous passes
index_json = self.get_templater_index(s3_commit_key, 'index.json')
templater.titles = index_json['titles']
templater.chapters = index_json['chapters']
templater.book_codes = index_json['book_codes']
templater.already_converted = templater.files # do not reconvert files

# merge the source files with the template
try:
self.run_templater(templater)
except Exception as e:
App.logger.error("Error multi-part applying template {0} to resource type {1}".format(template_file,
resource_type))
self.close()
source_dir, success = self.deploy_multipart_master(s3_commit_key, resource_type, download_key, output_dir,
source_dir, start, template_file)
if not success:
return False

# Copy first HTML file to index.html if index.html doesn't exist
Expand All @@ -198,7 +122,7 @@ def deploy_revision_to_door43(self, build_log_key):

# save master build_log.json
file_utils.write_file(os.path.join(output_dir, 'build_log.json'), build_log)
App.logger.debug("Final build_log.json:\n" + json.dumps(build_log))
App.logger.debug("Final build_log.json:\n" + json.dumps(build_log)[:256])

# Upload all files to the door43.org bucket
for root, dirs, files in os.walk(output_dir):
Expand All @@ -218,20 +142,138 @@ def deploy_revision_to_door43(self, build_log_key):
to_key='{0}/manifest.json'.format(s3_repo_key))
App.door43_s3_handler().redirect(s3_repo_key, '/' + s3_commit_key)
App.door43_s3_handler().redirect(s3_repo_key + '/index.html', '/' + s3_commit_key)
self.write_data_to_file(output_dir, s3_commit_key, 'deployed', ' ') # flag that deploy has finished
except:
pass

else:
else: # if processing part
if App.cdn_s3_handler().key_exists(s3_commit_key + '/final_build_log.json'):
App.logger.debug("final build detected")
App.logger.debug("conversions all finished, trigger final merge")
App.cdn_s3_handler().copy(from_key=s3_commit_key + '/final_build_log.json',
to_key=s3_commit_key + '/build_log.json')

self.write_data_to_file(output_dir, download_key, 'deployed', ' ') # flag that deploy has finished

elapsed_seconds = int(time.time() - start)
App.logger.debug("deploy completed in " + str(elapsed_seconds) + " seconds")
App.logger.debug("deploy type partial={0}, multi_merge={1}".format(partial, multi_merge))
App.logger.debug("deploy completed in {0} seconds".format(elapsed_seconds))
self.close()
return True

def deploy_multipart_master(self, s3_commit_key, resource_type, download_key, output_dir, source_dir, start,
                            template_file):
    """
    Run the final templating pass that merges all parts of a multi-part project.

    Downloads the previously templated part files from the door43 bucket,
    restores the accumulated index from earlier passes, and re-runs the
    templater over the merged set (already-converted files are skipped).

    :param string s3_commit_key: S3 key prefix for this commit's results
    :param string resource_type: resource type used to pick the templater
    :param string download_key: S3 key prefix of the previously deployed parts
    :param string output_dir: local directory the templater writes into
    :param string source_dir: local directory the part files are downloaded into
    :param float start: epoch time the deploy started (for elapsed logging)
    :param string template_file: local path of the downloaded HTML template
    :returns: tuple of (source_dir, success) — source_dir is re-pointed at the
        downloaded subdirectory; success is False if the templater raised
    """
    prefix = download_key + '/'
    undeployed = self.get_undeployed_parts(prefix)
    if len(undeployed) > 0:
        # NOTE(review): race guard — parts may not have finished deploying before
        # this merge kicked off; currently this only logs, it does not wait
        App.logger.debug("Parts not deployed: {0}".format(undeployed))

    App.door43_s3_handler().download_dir(prefix, source_dir)  # get previous templated files
    source_dir = os.path.join(source_dir, download_key)
    files = sorted(glob(os.path.join(source_dir, '*.*')))
    for f in files:
        App.logger.debug("Downloaded: " + f)
    fname = os.path.join(source_dir, 'index.html')
    if os.path.isfile(fname):
        os.remove(fname)  # remove index if already exists (it will be regenerated)
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    # restore index from previous passes
    index_json = self.get_templater_index(s3_commit_key, 'index.json')
    templater.titles = index_json['titles']
    templater.chapters = index_json['chapters']
    templater.book_codes = index_json['book_codes']
    templater.already_converted = templater.files  # do not reconvert files
    # merge the source files with the template
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        App.logger.error("Error multi-part applying template {0} to resource type {1}".format(template_file,
                                                                                             resource_type))
        self.close()
        success = False
    return source_dir, success

def get_undeployed_parts(self, prefix):
    """
    Find parts of a multi-part conversion that have a build_log.json but no
    'deployed' flag object yet.

    :param string prefix: S3 key prefix of the multi-part conversion (ends with '/')
    :returns: list of part-number strings that are not yet deployed
    """
    pending = []
    for obj in App.cdn_s3_handler().get_objects(prefix=prefix, suffix='/build_log.json'):
        pieces = obj.key.split(prefix)
        if len(pieces) != 2:
            continue
        sub_keys = pieces[1].split('/')
        if len(sub_keys) <= 1:
            continue
        part_num = sub_keys[0]
        # a 'deployed' marker object is written when a part finishes deploying
        if not App.cdn_s3_handler().key_exists(prefix + part_num + '/deployed'):
            App.logger.debug("Part {0} unfinished".format(part_num))
            pending.append(part_num)
    return pending

def deploy_single_conversion(self, build_log, download_key, output_dir, repo_name, resource_type, s3_commit_key,
                             source_dir, start, template_file):
    """
    Template the output of one (single-part) conversion and update the
    templated-files index on S3.

    Downloads the converted files from the CDN bucket, generates a fallback
    index.html when the conversion produced no HTML (showing build errors or a
    'conversion requested' message), runs the templater, and — on success —
    merges this pass's titles/chapters/book_codes into the stored index.json.

    :param dict build_log: build log for this conversion ('errors', 'message' keys used)
    :param string download_key: S3 key prefix of the converted files on the CDN
    :param string output_dir: local directory the templater writes into
    :param string repo_name: repository name, used in the fallback HTML page
    :param string resource_type: resource type used to pick the templater
    :param string s3_commit_key: S3 key prefix for this commit's results
    :param string source_dir: local directory the files are downloaded into
    :param float start: epoch time the deploy started (for elapsed logging)
    :param string template_file: local path of the downloaded HTML template
    :returns: tuple of (source_dir, success) — source_dir is re-pointed at the
        downloaded subdirectory; success is False if the templater raised
    """
    App.cdn_s3_handler().download_dir(download_key + '/', source_dir)
    source_dir = os.path.join(source_dir, download_key)
    elapsed_seconds = int(time.time() - start)
    App.logger.debug("deploy download completed in " + str(elapsed_seconds) + " seconds")
    html_files = sorted(glob(os.path.join(source_dir, '*.html')))
    if len(html_files) < 1:
        # no converted HTML — build a placeholder index page instead
        content = ''
        if len(build_log['errors']) > 0:
            content += """
            <div style="text-align:center;margin-bottom:20px">
                <i class="fa fa-times-circle-o" style="font-size: 250px;font-weight: 300;color: red"></i>
                <br/>
                <h2>Critical!</h2>
                <h3>Here is what went wrong with this build:</h3>
            </div>
        """
            content += '<div><ul><li>' + '</li><li>'.join(build_log['errors']) + '</li></ul></div>'
        else:
            content += '<h1 class="conversion-requested">{0}</h1>'.format(build_log['message'])
            content += '<p><i>No content is available to show for {0} yet.</i></p>'.format(repo_name)
        html = """
        <html lang="en">
            <head>
                <title>{0}</title>
            </head>
            <body>
                <div id="content">{1}</div>
            </body>
        </html>""".format(repo_name, content)
        repo_index_file = os.path.join(source_dir, 'index.html')
        write_file(repo_index_file, html)

    # merge the source files with the template
    templater = init_template(resource_type, source_dir, output_dir, template_file)
    try:
        self.run_templater(templater)
        success = True
    except Exception as e:
        App.logger.error("Error applying template {0} to resource type {1}".format(template_file,
                                                                                   resource_type))
        self.close()
        success = False

    if success:
        # update index of templated files so later passes can build on this one
        index_json_fname = 'index.json'
        index_json = self.get_templater_index(s3_commit_key, index_json_fname)
        App.logger.debug("initial 'index.json': " + json.dumps(index_json)[:256])
        self.update_index_key(index_json, templater, 'titles')
        self.update_index_key(index_json, templater, 'chapters')
        self.update_index_key(index_json, templater, 'book_codes')
        App.logger.debug("final 'index.json': " + json.dumps(index_json)[:256])
        self.write_data_to_file(output_dir, s3_commit_key, index_json_fname, index_json)
    return source_dir, success

def write_data_to_file(self, output_dir, s3_commit_key, fname, data):
    """
    Write data to a local file and upload it to the CDN bucket.

    :param string output_dir: local directory for the temp file
    :param string s3_commit_key: S3 key prefix for this commit's results
    :param string fname: file name to create and upload
    :param data: content to write (string or JSON-serializable object)
    """
    out_file = os.path.join(output_dir, fname)
    write_file(out_file, data)
    key = s3_commit_key + '/' + fname
    # fixed malformed log message: stray "': " residue from an earlier format string
    App.logger.debug("Writing {0} to {1}".format(fname, key))
    # cache_time=0 so status/flag files are never served stale
    App.cdn_s3_handler().upload_file(out_file, key, cache_time=0)

def run_templater(self, templater):  # for test purposes
    """Run the given templater; isolated as a seam so unit tests can override/mock it."""
    templater.run()

Expand Down
11 changes: 6 additions & 5 deletions tests/client_tests/test_client_linter_callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,7 +234,7 @@ def test_callbackMultpleJob(self):
results = linter_cb.process_callback()

# then
self.validate_results_and_log(results, linter_cb, expected_success, expected_status)
self.validate_results_and_log(results, linter_cb, expected_success, expected_status, final=False)

def test_callbackMultpleJob_first_merged(self):
# given
Expand Down Expand Up @@ -390,12 +390,13 @@ def test_callbackMultpleJob_last_job_LintNotFinished(self):
# helpers
#

def validate_results_and_log(self, results, linter_cb, expected_success, expected_status, final=True):
    """
    Verify the callback results and the build log uploaded to S3.

    Reconstructed from diff residue: the source span interleaved the pre- and
    post-change signature/call lines; this is the post-change method.

    :param results: return value of process_callback()
    :param linter_cb: the ClientLinterCallback instance under test
    :param bool expected_success: expected 'success' value in the build log
    :param string expected_status: expected 'status' value in the build log
    :param bool final: True to validate 'final_build_log.json' (all parts done),
        False to validate the per-part 'build_log.json'
    """
    self.validate_results(results, linter_cb)
    self.validate_build_log(expected_status, expected_success, final=final)

def validate_build_log(self, expected_status, expected_success, final=True):
    """
    Fetch the build log from S3 and assert its success/status fields.

    Reconstructed from diff residue; conditional rewritten without the double
    negative ('build_log.json' if not final else ...) for readability.

    :param string expected_status: expected 'status' value in the build log
    :param bool expected_success: expected 'success' value in the build log
    :param bool final: True to fetch 'final_build_log.json', False for 'build_log.json'
    """
    build_log_path = 'final_build_log.json' if final else 'build_log.json'
    key = "{0}/{1}".format(self.lint_callback_data['s3_results_key'], build_log_path)
    build_log = App.cdn_s3_handler().get_json(key)
    self.assertEquals(build_log['success'], expected_success)
    self.assertEquals(build_log['status'], expected_status)
Expand Down
Loading

0 comments on commit dcc0d4d

Please sign in to comment.