From fc72f1d9276a30d361601198e3cedc5f99664cb2 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 19 Dec 2022 10:36:14 -0600 Subject: [PATCH 001/134] Update celery task config Signed-off-by: Andrew Brain --- augur/tasks/init/celery_app.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index ca0c7cc0ce..7041305231 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -46,15 +46,13 @@ redis_db_number, redis_conn_string = get_redis_conn_values() -task_annotations = {'*': {'rate_limit': '5/s'}} - # initialize the celery app BROKER_URL = f'{redis_conn_string}{redis_db_number}' BACKEND_URL = f'{redis_conn_string}{redis_db_number+1}' -celery_app = Celery('tasks', broker=BROKER_URL, backend=BACKEND_URL, include=tasks, - CELERY_ANNOTATIONS=task_annotations, worker_pool_restarts=True) +celery_app = Celery('tasks', broker=BROKER_URL, backend=BACKEND_URL, include=tasks) +# define the queues that tasks will be put in (by default tasks are put in celery queue) celery_app.conf.task_routes = { 'augur.tasks.git.facade_tasks.*': {'queue': 'cpu'} } @@ -62,6 +60,22 @@ #Setting to be able to see more detailed states of running tasks celery_app.conf.task_track_started = True +#ignore task results by default +celery_app.conf.task_ignore_result = True + +# store task erros even if the task result is ignored +celery_app.conf.task_store_errors_even_if_ignored = True + +# set task default rate limit +celery_app.conf.task_default_rate_limit = '5/s' + +# set tasks annotations for rate limiting specific tasks +celery_app.conf.task_annotations = None + +# allow workers to be restarted remotely +celery_app.conf.worker_pool_restarts = True + + def split_tasks_into_groups(augur_tasks: List[str]) -> Dict[str, List[str]]: """Split tasks on the celery app into groups. From a0535b0d55fcbab25491ce122132a11d1eb280f3 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 29 Dec 2022 09:47:31 -0600 Subject: [PATCH 002/134] check to not subscript null value for relase info in get_release_inf Signed-off-by: Isaac Milarsky --- augur/tasks/github/releases/core.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 49e6d3f46b..c4fdd96782 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -16,9 +16,17 @@ def get_release_inf(session, repo_id, release, tag_only): if not tag_only: - name = "" if release['author']['name'] is None else release['author']['name'] - company = "" if release['author']['company'] is None else release['author']['company'] - author = name + '_' + company + + if release['author'] is None: + author = 'No Author Available.' + name = "N/A" + company = "N/A" + else: + name = "" if release['author']['name'] is None else release['author']['name'] + company = "" if release['author']['company'] is None else release['author']['company'] + author = name + '_' + company + + release_inf = { 'release_id': release['id'], 'repo_id': repo_id, From 1b9460fc880117f93901e92e3911cd1b6b727aae Mon Sep 17 00:00:00 2001 From: "Sean P. 
Goggins" Date: Thu, 29 Dec 2022 11:36:27 -0600 Subject: [PATCH 003/134] Fixing this issue: ``` Traceback (most recent call last): File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 451, in trace_task R = retval = fun(*args, **kwargs) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 734, in __protected_call__ return self.run(*args, **kwargs) File "/home/sean/github/rh-augur-new-dev/augur/tasks/data_analysis/__init__.py", line 16, in machine_learning_phase logger = logging.getLogger(machine_learning_phase.__name__) NameError: name 'logging' is not defined ``` --- augur/tasks/data_analysis/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index cdcfbedd18..cdedabdbb4 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -9,6 +9,7 @@ from augur.application.db.util import execute_session_query from celery import group, chain, chord, signature from augur.tasks.init.celery_app import celery_app as celery +import logging @celery.task def machine_learning_phase(): From e7ea83ed07fee502d637b185a1db1d2d8c3de5d6 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 29 Dec 2022 12:50:07 -0600 Subject: [PATCH 004/134] outfactor initial committer grabbing Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 6 +++--- augur/tasks/start_tasks.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 7403ee45f6..7834b3ed05 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -70,8 +70,8 @@ def facade_analysis_init_facade_task(): session.log_activity('Info',f"Beginning analysis.") @celery.task -def grab_comitter_list_facade_task(repo_id,platform="github"): - logger = logging.getLogger(grab_comitter_list_facade_task.__name__) +def grab_comitters(repo_id,platform="github"): + logger = logging.getLogger(grab_comitters.__name__) try: grab_committer_list(GithubTaskSession(logger), repo_id,platform) @@ -255,7 +255,7 @@ def generate_analysis_sequence(logger): analysis_sequence.append(facade_analysis_init_facade_task.si().on_error(facade_error_handler.s())) for repo in repos: session.logger.info(f"Generating sequence for repo {repo['repo_id']}") - analysis_sequence.append(grab_comitter_list_facade_task.si(repo['repo_id']).on_error(facade_error_handler.s())) + #analysis_sequence.append(grab_comitter_list_facade_task.si(repo['repo_id']).on_error(facade_error_handler.s())) analysis_sequence.append(trim_commits_facade_task.si(repo['repo_id']).on_error(facade_error_handler.s())) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index a51eb3a43f..c08dfe0e9d 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -73,7 +73,7 @@ def repo_collect_phase(): second_tasks_repo = group(collect_events.si(repo.repo_git), collect_github_messages.si(repo.repo_git),process_pull_request_files.si(repo.repo_git), process_pull_request_commits.si(repo.repo_git)) - repo_chain = chain(first_tasks_repo,second_tasks_repo) + repo_chain = chain(grab_comitters(repo.repo_id),first_tasks_repo,second_tasks_repo) issue_dependent_tasks.append(repo_chain) repo_task_group = group( From 1dae0508e815230944bf9e8f4893fc62f00f2a72 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 29 Dec 2022 13:25:21 -0600 Subject: [PATCH 005/134] minor analysis log fix and 
phase error caught Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 22 ++++------------------ augur/tasks/start_tasks.py | 13 +++++++------ 2 files changed, 11 insertions(+), 24 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 7834b3ed05..243b5d69f5 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -121,6 +121,10 @@ def update_analysis_log(repos_id,status): working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) session.execute_sql(remove_commit) session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") + + # Start the main analysis + + update_analysis_log(repo_id,'Collecting data') @celery.task def trim_commits_post_analysis_facade_task(repo_id,commits): @@ -177,28 +181,10 @@ def analyze_commits_in_parallel(queue: list, repo_id: int, repo_location: str, m """Take a large list of commit data to analyze and store in the database. Meant to be run in parallel with other instances of this task. """ - ### Local helper functions ### #create new session for celery thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) session = FacadeSession(logger) - def update_analysis_log(repos_id,status): - - # Log a repo's analysis status - - log_message = s.sql.text("""INSERT INTO analysis_log (repos_id,status) - VALUES (:repo_id,:status)""").bindparams(repo_id=repos_id,status=status) - - try: - session.execute_sql(log_message) - except: - pass - - - # Start the main analysis - - update_analysis_log(repo_id,'Collecting data') - for analyzeCommit in queue: diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index c08dfe0e9d..be21866da2 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -35,7 +35,7 @@ #Predefine phases. For new phases edit this and the config to reflect. #The domain of tasks ran should be very explicit. -@celery.task +#@celery.task def prelim_phase(): logger = logging.getLogger(prelim_phase.__name__) @@ -51,9 +51,10 @@ def prelim_phase(): #preliminary_task_list = [detect_github_repo_move.si()] preliminary_tasks = group(*tasks_with_repo_domain) - preliminary_tasks.apply_async() + #preliminary_tasks.apply_async() + return preliminary_tasks -@celery.task +#@celery.task def repo_collect_phase(): logger = logging.getLogger(repo_collect_phase.__name__) @@ -73,7 +74,7 @@ def repo_collect_phase(): second_tasks_repo = group(collect_events.si(repo.repo_git), collect_github_messages.si(repo.repo_git),process_pull_request_files.si(repo.repo_git), process_pull_request_commits.si(repo.repo_git)) - repo_chain = chain(grab_comitters(repo.repo_id),first_tasks_repo,second_tasks_repo) + repo_chain = chain(grab_comitters.si(repo.repo_id),first_tasks_repo,second_tasks_repo) issue_dependent_tasks.append(repo_chain) repo_task_group = group( @@ -83,7 +84,7 @@ def repo_collect_phase(): collect_releases.si() ) - chain(repo_task_group, refresh_materialized_views.si()).apply_async() + return chain(repo_task_group, refresh_materialized_views.si()) DEFINED_COLLECTION_PHASES = [prelim_phase, repo_collect_phase] @@ -138,7 +139,7 @@ def start_data_collection(self): #Add the phase to the sequence in order as a celery task. 
#The preliminary task creates the larger task chain - augur_collection_sequence.append(job.si()) + augur_collection_sequence.append(job()) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) From b0bea46be6ce49064007fdff816094325ffd4d36 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 29 Dec 2022 14:26:24 -0600 Subject: [PATCH 006/134] Fix facade phase errors Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 243b5d69f5..4da4e69aa7 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -125,6 +125,7 @@ def update_analysis_log(repos_id,status): # Start the main analysis update_analysis_log(repo_id,'Collecting data') + logger.info(f"Got past repo {repo_id}") @celery.task def trim_commits_post_analysis_facade_task(repo_id,commits): @@ -185,11 +186,13 @@ def analyze_commits_in_parallel(queue: list, repo_id: int, repo_location: str, m logger = logging.getLogger(analyze_commits_in_parallel.__name__) session = FacadeSession(logger) - + logger.info(f"Got to analysis!") for analyzeCommit in queue: analyze_commit(session, repo_id, repo_location, analyzeCommit) + + logger.info("Analysis complete") @celery.task def nuke_affiliations_facade_task(): @@ -243,7 +246,7 @@ def generate_analysis_sequence(logger): session.logger.info(f"Generating sequence for repo {repo['repo_id']}") #analysis_sequence.append(grab_comitter_list_facade_task.si(repo['repo_id']).on_error(facade_error_handler.s())) - analysis_sequence.append(trim_commits_facade_task.si(repo['repo_id']).on_error(facade_error_handler.s())) + analysis_sequence.append(trim_commits_facade_task.si(repo['repo_id'])) #Get the huge list of commits to process. @@ -295,7 +298,7 @@ def generate_analysis_sequence(logger): # Find commits which are out of the analysis range trimmed_commits = existing_commits - parent_commits - analysis_sequence.append(trim_commits_post_analysis_facade_task.si(repo['repo_id'],list(trimmed_commits)).on_error(facade_error_handler.s())) + analysis_sequence.append(trim_commits_post_analysis_facade_task.si(repo['repo_id'],list(trimmed_commits))) analysis_sequence.append(facade_analysis_end_facade_task.si().on_error(facade_error_handler.s())) @@ -320,7 +323,7 @@ def generate_contributor_sequence(logger): contrib_group = group(contributor_sequence) contrib_group.link_error(facade_error_handler.s()) - return chain(facade_start_contrib_analysis_task.si(),) + return contrib_group#chain(facade_start_contrib_analysis_task.si(), contrib_group) @@ -374,7 +377,7 @@ def generate_facade_chain(logger): facade_sequence.extend(generate_analysis_sequence(logger)) #Generate contributor analysis task group. 
- facade_sequence.append(generate_contributor_sequence(logger)) + #facade_sequence.append(generate_contributor_sequence(logger)) if nuke_stored_affiliations: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) From 136649337cc993b09f2bef56e4ac2f408999a54c Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 29 Dec 2022 14:29:52 -0600 Subject: [PATCH 007/134] undo move of grab_committers Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 4 ++-- augur/tasks/start_tasks.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 4da4e69aa7..16d111f12b 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -244,8 +244,8 @@ def generate_analysis_sequence(logger): analysis_sequence.append(facade_analysis_init_facade_task.si().on_error(facade_error_handler.s())) for repo in repos: session.logger.info(f"Generating sequence for repo {repo['repo_id']}") - #analysis_sequence.append(grab_comitter_list_facade_task.si(repo['repo_id']).on_error(facade_error_handler.s())) - + analysis_sequence.append(grab_comitters.si(repo['repo_id']).on_error(facade_error_handler.s())) + #grab_comitters.si(repo.repo_id), analysis_sequence.append(trim_commits_facade_task.si(repo['repo_id'])) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index be21866da2..114ca9fc44 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -74,7 +74,7 @@ def repo_collect_phase(): second_tasks_repo = group(collect_events.si(repo.repo_git), collect_github_messages.si(repo.repo_git),process_pull_request_files.si(repo.repo_git), process_pull_request_commits.si(repo.repo_git)) - repo_chain = chain(grab_comitters.si(repo.repo_id),first_tasks_repo,second_tasks_repo) + repo_chain = chain(first_tasks_repo,second_tasks_repo) issue_dependent_tasks.append(repo_chain) repo_task_group = group( From 985f0f56003c816331a20b15073aa95589e6f8bb Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 29 Dec 2022 14:32:53 -0600 Subject: [PATCH 008/134] Clean up syntax Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/__init__.py | 4 ++-- augur/tasks/start_tasks.py | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index cdcfbedd18..2c1c0a5e49 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -10,7 +10,6 @@ from celery import group, chain, chord, signature from augur.tasks.init.celery_app import celery_app as celery -@celery.task def machine_learning_phase(): logger = logging.getLogger(machine_learning_phase.__name__) @@ -40,4 +39,5 @@ def machine_learning_phase(): task_chain = chain(*ml_tasks) - task_chain.apply_async() \ No newline at end of file + #task_chain.apply_async() + return task_chain \ No newline at end of file diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 114ca9fc44..f5a48e3112 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -35,7 +35,6 @@ #Predefine phases. For new phases edit this and the config to reflect. #The domain of tasks ran should be very explicit. 
-#@celery.task def prelim_phase(): logger = logging.getLogger(prelim_phase.__name__) @@ -54,7 +53,6 @@ def prelim_phase(): #preliminary_tasks.apply_async() return preliminary_tasks -#@celery.task def repo_collect_phase(): logger = logging.getLogger(repo_collect_phase.__name__) From 2a617637ac0f134ba9223454bab5628d547a3c52 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 5 Jan 2023 14:43:49 -0600 Subject: [PATCH 009/134] Pr files fix (#2108) * scaling fix for repo_move Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Change to rabbitmq broker Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * don't ignore result Signed-off-by: Isaac Milarsky * More logging in detect_github_repo_move Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * print Signed-off-by: Isaac Milarsky * re-add facade contributors to task queue Signed-off-by: Isaac Milarsky * better handling and logging files model Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky --- augur/application/cli/config.py | 6 ++++- augur/application/config.py | 3 +++ augur/tasks/git/facade_tasks.py | 2 +- augur/tasks/github/detect_move/tasks.py | 13 +++++++---- .../tasks/github/util/gh_graphql_entities.py | 4 ++-- augur/tasks/init/__init__.py | 10 ++++++++ augur/tasks/init/celery_app.py | 6 ++--- augur/tasks/start_tasks.py | 13 +++-------- augur/tasks/util/worker_util.py | 6 +++-- scripts/install/config.sh | 23 +++++++++++++++++-- 10 files changed, 61 insertions(+), 25 deletions(-) diff --git a/augur/application/cli/config.py b/augur/application/cli/config.py index dca75ad115..3d93d363d7 100644 --- a/augur/application/cli/config.py +++ b/augur/application/cli/config.py @@ -27,9 +27,10 @@ def cli(): @click.option('--facade-repo-directory', help="Directory on the database server where Facade should clone repos", envvar=ENVVAR_PREFIX + 'FACADE_REPO_DIRECTORY') @click.option('--gitlab-api-key', help="GitLab API key for data collection from the GitLab API", envvar=ENVVAR_PREFIX + 'GITLAB_API_KEY') @click.option('--redis-conn-string', help="String to connect to redis cache", envvar=ENVVAR_PREFIX + 'REDIS_CONN_STRING') +@click.option('--rabbitmq-conn-string', help="String to connect to rabbitmq broker", envvar=ENVVAR_PREFIX + 'RABBITMQ_CONN_STRING') @test_connection @test_db_connection -def init_config(github_api_key, facade_repo_directory, gitlab_api_key, redis_conn_string): +def init_config(github_api_key, facade_repo_directory, gitlab_api_key, redis_conn_string, rabbitmq_conn_string): if not github_api_key: @@ -89,6 +90,9 @@ def init_config(github_api_key, facade_repo_directory, gitlab_api_key, redis_con default_config["Redis"]["connection_string"] = redis_conn_string + if rabbitmq_conn_string: + default_config["RabbitMQ"]["connection_string"] = rabbitmq_conn_string + default_config["Keys"] = keys default_config["Facade"]["repo_directory"] = facade_repo_directory diff --git a/augur/application/config.py b/augur/application/config.py index 58c2fa3eea..ee1cdff367 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -71,6 +71,9 @@ def get_development_flag(): "cache_group": 0, "connection_string": "redis://127.0.0.1:6379/" }, + "RabbitMQ": { + "connection_string": "amqp://augur:password123@localhost:5672/augur_vhost" + }, "Tasks": { "collection_interval": 2592000 }, diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 16d111f12b..fbe4783b01 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py 
@@ -377,7 +377,7 @@ def generate_facade_chain(logger): facade_sequence.extend(generate_analysis_sequence(logger)) #Generate contributor analysis task group. - #facade_sequence.append(generate_contributor_sequence(logger)) + facade_sequence.append(generate_contributor_sequence(logger)) if nuke_stored_affiliations: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py index 475ebbc523..2acc440747 100644 --- a/augur/tasks/github/detect_move/tasks.py +++ b/augur/tasks/github/detect_move/tasks.py @@ -6,10 +6,15 @@ @celery.task -def detect_github_repo_move(repo_git: str) -> None: +def detect_github_repo_move(repo_git_identifiers : str) -> None: logger = logging.getLogger(detect_github_repo_move.__name__) + logger.info(f"Starting repo_move operation with {repo_git_identifiers}") with GithubTaskSession(logger) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - ping_github_for_repo_move(session, repo) \ No newline at end of file + #Ping each repo with the given repo_git to make sure + #that they are still in place. + for repo_git in repo_git_identifiers: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + logger.info(f"Pinging repo: {repo_git}") + ping_github_for_repo_move(session, repo) \ No newline at end of file diff --git a/augur/tasks/github/util/gh_graphql_entities.py b/augur/tasks/github/util/gh_graphql_entities.py index 3323bcf3be..137bac06d3 100644 --- a/augur/tasks/github/util/gh_graphql_entities.py +++ b/augur/tasks/github/util/gh_graphql_entities.py @@ -341,8 +341,8 @@ def __iter__(self): self.logger.error( ''.join(traceback.format_exception(None, e, e.__traceback__))) - data = self.request_graphql_dict(variables=params) - coreData = self.extract_paginate_result(data) + self.logger.info(f"Graphql paramters: {params}") + return if int(coreData['totalCount']) == 0: diff --git a/augur/tasks/init/__init__.py b/augur/tasks/init/__init__.py index 36486d08bb..eb590a99ab 100644 --- a/augur/tasks/init/__init__.py +++ b/augur/tasks/init/__init__.py @@ -18,3 +18,13 @@ def get_redis_conn_values(): redis_conn_string += "/" return redis_db_number, redis_conn_string + +def get_rabbitmq_conn_string(): + logger = logging.getLogger(__name__) + + with DatabaseSession(logger) as session: + config = AugurConfig(logger, session) + + rabbbitmq_conn_string = config.get_value("RabbitMQ", "connection_string") + + return rabbbitmq_conn_string diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index b4dacc9c66..457c913184 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -12,7 +12,7 @@ from augur.application.logs import TaskLogConfig from augur.application.db.session import DatabaseSession from augur.application.db.engine import get_database_string -from augur.tasks.init import get_redis_conn_values +from augur.tasks.init import get_redis_conn_values, get_rabbitmq_conn_string logger = logging.getLogger(__name__) @@ -48,7 +48,7 @@ redis_db_number, redis_conn_string = get_redis_conn_values() # initialize the celery app -BROKER_URL = f'{redis_conn_string}{redis_db_number}' +BROKER_URL = get_rabbitmq_conn_string()#f'{redis_conn_string}{redis_db_number}' BACKEND_URL = f'{redis_conn_string}{redis_db_number+1}' celery_app = Celery('tasks', broker=BROKER_URL, 
backend=BACKEND_URL, include=tasks) @@ -62,7 +62,7 @@ celery_app.conf.task_track_started = True #ignore task results by default -celery_app.conf.task_ignore_result = True +##celery_app.conf.task_ignore_result = True # store task erros even if the task result is ignored celery_app.conf.task_store_errors_even_if_ignored = True diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index f5a48e3112..c3d29c4535 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -38,20 +38,13 @@ def prelim_phase(): logger = logging.getLogger(prelim_phase.__name__) - - tasks_with_repo_domain = [] - + with DatabaseSession(logger) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') + repo_git_list = [repo.repo_git for repo in repos] - for repo in repos: - tasks_with_repo_domain.append(detect_github_repo_move.si(repo.repo_git)) - - #preliminary_task_list = [detect_github_repo_move.si()] - preliminary_tasks = group(*tasks_with_repo_domain) - #preliminary_tasks.apply_async() - return preliminary_tasks + return create_grouped_task_load(dataList=repo_git_list,task=detect_github_repo_move) def repo_collect_phase(): logger = logging.getLogger(repo_collect_phase.__name__) diff --git a/augur/tasks/util/worker_util.py b/augur/tasks/util/worker_util.py index 960ff9cc47..8efac1db7b 100644 --- a/augur/tasks/util/worker_util.py +++ b/augur/tasks/util/worker_util.py @@ -16,8 +16,10 @@ def create_grouped_task_load(*args,processes=8,dataList=[],task=None): if not dataList or not task: raise AssertionError - numpyData = np.array(list(dataList)) - listsSplitForProcesses = np.array_split(numpyData, processes) + print(f"Splitting {len(dataList)} items") + #numpyData = np.array(list(dataList)) + listsSplitForProcesses = np.array_split(list(dataList), processes) + print("Done splitting items.") #print("args") #print(args) diff --git a/scripts/install/config.sh b/scripts/install/config.sh index 962c865707..10a1645cf3 100755 --- a/scripts/install/config.sh +++ b/scripts/install/config.sh @@ -87,6 +87,14 @@ function get_facade_repo_path() { [[ "${facade_repo_directory}" != */ ]] && facade_repo_directory="${facade_repo_directory}/" } +function get_rabbitmq_broker_url(){ + echo + echo "Please provide your rabbitmq broker url." + echo "** This is required for Augur to run all collection tasks. 
***" + read -p "broker_url: " rabbitmq_conn_string + echo +} + function create_config(){ @@ -146,13 +154,24 @@ function create_config(){ echo "Please unset AUGUR_FACADE_REPO_DIRECTORY if you would like to be prompted for the facade repo directory" facade_repo_directory=$AUGUR_FACADE_REPO_DIRECTORY fi + + if [[ -z "${RABBITMQ_CONN_STRING}" ]] + then + get_rabbitmq_broker_url + else + echo + echo "Found RABBITMQ_CONN_STRING environment variable with value $RABBITMQ_CONN_STRING" + echo "Using it in the config" + echo "Please unset RABBITMQ_CONN_STRING if you would like to be prompted for the facade repo directory" + rabbitmq_conn_string=$RABBITMQ_CONN_STRING + fi #special case for docker entrypoint if [ $target = "docker" ]; then - cmd=( augur config init --github-api-key $github_api_key --gitlab-api-key $gitlab_api_key --facade-repo-directory $facade_repo_directory --redis-conn-string $redis_conn_string ) + cmd=( augur config init --github-api-key $github_api_key --gitlab-api-key $gitlab_api_key --facade-repo-directory $facade_repo_directory --redis-conn-string $redis_conn_string --rabbitmq-conn-string $rabbitmq_conn_string ) echo "init with redis $redis_conn_string" else - cmd=( augur config init --github-api-key $github_api_key --gitlab-api-key $gitlab_api_key --facade-repo-directory $facade_repo_directory ) + cmd=( augur config init --github-api-key $github_api_key --gitlab-api-key $gitlab_api_key --facade-repo-directory $facade_repo_directory --rabbitmq-conn-string $rabbitmq_conn_string ) fi From 159f59cef0c81037e7698b210f65017613ab215f Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sun, 15 Jan 2023 12:57:45 -0600 Subject: [PATCH 010/134] RabbitMQ Broker Changes and Scaling Improvements (#2114) * scaling fix for repo_move Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Change to rabbitmq broker Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * don't ignore result Signed-off-by: Isaac Milarsky * More logging in detect_github_repo_move Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * print Signed-off-by: Isaac Milarsky * re-add facade contributors to task queue Signed-off-by: Isaac Milarsky * better handling and logging files model Signed-off-by: Isaac Milarsky * take advantage of rabbitmq allowing us to use celery result Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * get rid of redundant definition Signed-off-by: Isaac Milarsky * docs update Signed-off-by: Isaac Milarsky * Change celery task scheduling to not scale proportionally to the amount of repos Signed-off-by: Isaac Milarsky * analysis sequence pooling for facade scaling Signed-off-by: Isaac Milarsky * need to fix issues with accessing redis Signed-off-by: Isaac Milarsky * don't create so many sessions Signed-off-by: Isaac Milarsky * Update Signed-off-by: Isaac Milarsky * doc update * fix facade date query error Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * updating MQ and REDIS Docs * Updates to docs. * documentation updatese * test * documentation updates * doc hell * trying * analyze_commits_in_parallel now shows progress in quarters in the logs. 
Also applied same scaling changes to facade contributor resolution in insert_facade_contributors Signed-off-by: Isaac Milarsky * sql format Signed-off-by: Isaac Milarsky * Typo Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky Co-authored-by: Isaac Milarsky Co-authored-by: Isaac Milarsky --- augur/tasks/data_analysis/__init__.py | 20 +- augur/tasks/git/facade_tasks.py | 167 +++++++++------ .../facade_worker/facade03analyzecommit.py | 6 +- .../facade_worker/facade07rebuildcache.py | 3 +- augur/tasks/github/detect_move/tasks.py | 2 +- augur/tasks/github/events/tasks.py | 44 ++-- .../contributor_interface.py | 11 +- augur/tasks/github/facade_github/tasks.py | 192 +++++++++--------- augur/tasks/github/issues/tasks.py | 36 ++-- augur/tasks/github/messages/tasks.py | 31 +-- .../pull_requests/commits_model/tasks.py | 19 +- .../github/pull_requests/files_model/tasks.py | 19 +- augur/tasks/github/pull_requests/tasks.py | 29 +-- augur/tasks/github/releases/core.py | 2 +- augur/tasks/github/repo_info/tasks.py | 20 +- augur/tasks/start_tasks.py | 42 ++-- augur/tasks/util/worker_util.py | 2 + docs/new-install.md | 17 +- docs/source/docker/docker-compose.rst | 58 +----- docs/source/docker/getting-started.rst | 14 +- docs/source/docker/quick-start.rst | 22 +- .../getting-started/collecting-data.rst | 2 +- 22 files changed, 391 insertions(+), 367 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 1d0877cdaf..3324137668 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -1,9 +1,3 @@ -from augur.tasks.data_analysis.clustering_worker.tasks import clustering_model -from augur.tasks.data_analysis.contributor_breadth_worker.contributor_breadth_worker import contributor_breadth_model -from augur.tasks.data_analysis.discourse_analysis.tasks import discourse_analysis_model -from augur.tasks.data_analysis.insight_worker.tasks import insight_model -from augur.tasks.data_analysis.message_insights.tasks import message_insight_model -from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo from augur.application.db.util import execute_session_query @@ -11,7 +5,15 @@ from augur.tasks.init.celery_app import celery_app as celery import logging +@celery.task def machine_learning_phase(): + from augur.tasks.data_analysis.clustering_worker.tasks import clustering_model + from augur.tasks.data_analysis.contributor_breadth_worker.contributor_breadth_worker import contributor_breadth_model + from augur.tasks.data_analysis.discourse_analysis.tasks import discourse_analysis_model + from augur.tasks.data_analysis.insight_worker.tasks import insight_model + from augur.tasks.data_analysis.message_insights.tasks import message_insight_model + from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model + logger = logging.getLogger(machine_learning_phase.__name__) @@ -40,5 +42,7 @@ def machine_learning_phase(): task_chain = chain(*ml_tasks) - #task_chain.apply_async() - return task_chain \ No newline at end of file + result = task_chain.apply_async() + with allow_join_result(): + return result.get() + #return task_chain \ No newline at end of file diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index fbe4783b01..bcb329554d 100644 --- a/augur/tasks/git/facade_tasks.py +++ 
b/augur/tasks/git/facade_tasks.py @@ -70,17 +70,18 @@ def facade_analysis_init_facade_task(): session.log_activity('Info',f"Beginning analysis.") @celery.task -def grab_comitters(repo_id,platform="github"): +def grab_comitters(repo_id_list,platform="github"): logger = logging.getLogger(grab_comitters.__name__) - try: - grab_committer_list(GithubTaskSession(logger), repo_id,platform) - except Exception as e: - logger.error(f"Could not grab committers from github endpoint!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - + for repo_id in repo_id_list: + try: + grab_committer_list(GithubTaskSession(logger), repo_id,platform) + except Exception as e: + logger.error(f"Could not grab committers from github endpoint!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + @celery.task -def trim_commits_facade_task(repo_id): +def trim_commits_facade_task(repo_id_list): logger = logging.getLogger(trim_commits_facade_task.__name__) session = FacadeSession(logger) @@ -96,39 +97,39 @@ def update_analysis_log(repos_id,status): except: pass + for repo_id in repo_id_list: + session.inc_repos_processed() + update_analysis_log(repo_id,"Beginning analysis.") + # First we check to see if the previous analysis didn't complete - session.inc_repos_processed() - update_analysis_log(repo_id,"Beginning analysis.") - # First we check to see if the previous analysis didn't complete + get_status = s.sql.text("""SELECT working_commit FROM working_commits WHERE repos_id=:repo_id + """).bindparams(repo_id=repo_id) - get_status = s.sql.text("""SELECT working_commit FROM working_commits WHERE repos_id=:repo_id - """).bindparams(repo_id=repo_id) + try: + working_commits = session.fetchall_data_from_sql_text(get_status) + except: + working_commits = [] - try: - working_commits = session.fetchall_data_from_sql_text(get_status) - except: - working_commits = [] - - # If there's a commit still there, the previous run was interrupted and - # the commit data may be incomplete. It should be trimmed, just in case. - for commit in working_commits: - trim_commit(session, repo_id,commit['working_commit']) - - # Remove the working commit. - remove_commit = s.sql.text("""DELETE FROM working_commits - WHERE repos_id = :repo_id AND - working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) - session.execute_sql(remove_commit) - session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") - - # Start the main analysis + # If there's a commit still there, the previous run was interrupted and + # the commit data may be incomplete. It should be trimmed, just in case. + for commit in working_commits: + trim_commit(session, repo_id,commit['working_commit']) + + # Remove the working commit. 
+ remove_commit = s.sql.text("""DELETE FROM working_commits + WHERE repos_id = :repo_id AND + working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) + session.execute_sql(remove_commit) + session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") - update_analysis_log(repo_id,'Collecting data') - logger.info(f"Got past repo {repo_id}") + # Start the main analysis + + update_analysis_log(repo_id,'Collecting data') + logger.info(f"Got past repo {repo_id}") @celery.task -def trim_commits_post_analysis_facade_task(repo_id,commits): +def trim_commits_post_analysis_facade_task(commits): logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) session = FacadeSession(logger) @@ -142,24 +143,32 @@ def update_analysis_log(repos_id,status): session.execute_sql(log_message) + repo_ids = [] - update_analysis_log(repo_id,'Data collection complete') - - update_analysis_log(repo_id,'Beginning to trim commits') - - session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(commits)}") - for commit in commits: - trim_commit(session,repo_id,commit) - set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' - """).bindparams(repo_id=repo_id) + for commit in commits: + repo_id = commit[1] + if repo_id not in repo_ids: + update_analysis_log(repo_id,'Data collection complete') + + update_analysis_log(repo_id,'Beginning to trim commits') + + session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(commits)}") + + repo_ids.append(repo_id) + + trim_commit(session,repo_id,commit[0]) - session.execute_sql(set_complete) + for repo_id in repo_ids: + set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' + """).bindparams(repo_id=repo_id) - update_analysis_log(repo_id,'Commit trimming complete') + session.execute_sql(set_complete) - update_analysis_log(repo_id,'Complete') + update_analysis_log(repo_id,'Commit trimming complete') + + update_analysis_log(repo_id,'Complete') @celery.task def facade_analysis_end_facade_task(): @@ -178,20 +187,29 @@ def facade_start_contrib_analysis_task(): #enable celery multithreading @celery.task -def analyze_commits_in_parallel(queue: list, repo_id: int, repo_location: str, multithreaded: bool)-> None: +def analyze_commits_in_parallel(queue: list, multithreaded: bool)-> None: """Take a large list of commit data to analyze and store in the database. Meant to be run in parallel with other instances of this task. """ #create new session for celery thread. 
logger = logging.getLogger(analyze_commits_in_parallel.__name__) - session = FacadeSession(logger) logger.info(f"Got to analysis!") + session = FacadeSession(logger) + for count, commitTuple in enumerate(queue): + + #Log progress when another quarter of the queue has been processed + if (count + 1) % int(len(queue) / 4) == 0: + logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") - for analyzeCommit in queue: + query = session.query(Repo).filter(Repo.repo_id == commitTuple[1]) + repo = execute_session_query(query,'one') + + + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + + analyze_commit(session, commitTuple[1], repo_loc, commitTuple[0]) - analyze_commit(session, repo_id, repo_location, analyzeCommit) - logger.info("Analysis complete") @celery.task @@ -241,12 +259,20 @@ def generate_analysis_sequence(logger): start_date = session.get_setting('start_date') - analysis_sequence.append(facade_analysis_init_facade_task.si().on_error(facade_error_handler.s())) + repo_ids = [repo['repo_id'] for repo in repos] + + analysis_sequence.append(facade_analysis_init_facade_task.si()) + + analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=grab_comitters)) + + analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=trim_commits_facade_task)) + + all_missing_commits = [] + all_trimmed_commits = [] + + for repo in repos: session.logger.info(f"Generating sequence for repo {repo['repo_id']}") - analysis_sequence.append(grab_comitters.si(repo['repo_id']).on_error(facade_error_handler.s())) - #grab_comitters.si(repo.repo_id), - analysis_sequence.append(trim_commits_facade_task.si(repo['repo_id'])) #Get the huge list of commits to process. @@ -289,20 +315,28 @@ def generate_analysis_sequence(logger): if len(missing_commits) > 0: #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) - #Split commits into mostly equal queues so each process starts with a workload and there is no - # overhead to pass into queue from the parent. - contrib_jobs = create_grouped_task_load(repo['repo_id'],repo_loc,True,dataList=list(missing_commits),task=analyze_commits_in_parallel) - contrib_jobs.link_error(facade_error_handler.s()) - analysis_sequence.append(contrib_jobs) + #encode the repo_id with the commit. 
+ commits_with_repo_tuple = [(commit,repo['repo_id']) for commit in list(missing_commits)] + #Get all missing commits into one large list to split into task pools + all_missing_commits.extend(commits_with_repo_tuple) # Find commits which are out of the analysis range trimmed_commits = existing_commits - parent_commits - analysis_sequence.append(trim_commits_post_analysis_facade_task.si(repo['repo_id'],list(trimmed_commits))) + + trimmed_commits_with_repo_tuple = [(commit,repo['repo_id']) for commit in list(trimmed_commits)] + all_trimmed_commits.extend(trimmed_commits_with_repo_tuple) + + + if all_missing_commits: + analysis_sequence.append(create_grouped_task_load(True,dataList=all_missing_commits,task=analyze_commits_in_parallel)) + + if all_trimmed_commits: + analysis_sequence.append(create_grouped_task_load(dataList=all_trimmed_commits,task=trim_commits_post_analysis_facade_task)) - analysis_sequence.append(facade_analysis_end_facade_task.si().on_error(facade_error_handler.s())) + analysis_sequence.append(facade_analysis_end_facade_task.si()) - #print(f"Analysis sequence: {analysis_sequence}") + logger.info(f"Analysis sequence: {analysis_sequence}") return analysis_sequence @@ -310,6 +344,7 @@ def generate_analysis_sequence(logger): def generate_contributor_sequence(logger): contributor_sequence = [] + all_repo_ids = [] with FacadeSession(logger) as session: #contributor_sequence.append(facade_start_contrib_analysis_task.si()) @@ -318,10 +353,11 @@ def generate_contributor_sequence(logger): all_repos = session.fetchall_data_from_sql_text(query) #pdb.set_trace() #breakpoint() - for repo in all_repos: - contributor_sequence.append(insert_facade_contributors.si(repo['repo_id'])) + #for repo in all_repos: + # contributor_sequence.append(insert_facade_contributors.si(repo['repo_id'])) + all_repo_ids = [repo['repo_id'] for repo in all_repos] - contrib_group = group(contributor_sequence) + contrib_group = create_grouped_task_load(dataList=all_repo_ids,task=insert_facade_contributors)#group(contributor_sequence) contrib_group.link_error(facade_error_handler.s()) return contrib_group#chain(facade_start_contrib_analysis_task.si(), contrib_group) @@ -392,5 +428,6 @@ def generate_facade_chain(logger): if not limited_run or (limited_run and rebuild_caches): facade_sequence.append(rebuild_unknown_affiliation_and_web_caches_facade_task.si().on_error(facade_error_handler.s()))#rebuild_unknown_affiliation_and_web_caches(session.cfg) + #logger.info(f"Facade sequence: {facade_sequence}") return chain(*facade_sequence) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade03analyzecommit.py b/augur/tasks/git/util/facade_worker/facade_worker/facade03analyzecommit.py index d6914cb97f..e62d1c089c 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade03analyzecommit.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade03analyzecommit.py @@ -165,7 +165,7 @@ def store_commit(repos_id,commit,filename, raise e - session.log_activity('Debug',f"Stored commit: {commit}") + #session.log_activity('Debug',f"Stored commit: {commit}") ### The real function starts here ### @@ -203,7 +203,7 @@ def store_commit(repos_id,commit,filename, #db_local.commit() session.execute_sql(store_working_commit) - session.log_activity('Debug',f"Stored working commit and analyzing : {commit}") + #session.log_activity('Debug',f"Stored working commit and analyzing : {commit}") for line in git_log.stdout.read().decode("utf-8",errors="ignore").split(os.linesep): if len(line) > 0: @@ -346,7 +346,7 @@ def 
store_commit(repos_id,commit,filename, """).bindparams(repo_id=repo_id,hash=commit) session.execute_sql(remove_commit) - session.log_activity('Debug',f"Completed and removed working commit: {commit}") + #session.log_activity('Debug',f"Completed and removed working commit: {commit}") except: session.log_activity('Info', f"Working Commit: {commit}") # If multithreading, clean up the local database diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py b/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py index 2b917cc4aa..385de4dc36 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py @@ -148,7 +148,7 @@ def discover_null_affiliations(attribution,email): f"SET cmt_{attribution}_affiliation = :affiliation " f"WHERE cmt_{attribution}_email = :email " f"AND cmt_{attribution}_affiliation IS NULL " - f"AND cmt_{attribution}_date::date >= {match['ca_start_date']}::date") + f"AND cmt_{attribution}_date::date >= \'{match['ca_start_date']}\'::date") ).bindparams(affiliation=match['ca_affiliation'],email=email) session.log_activity('Info', f"attr: {attribution} \nmatch:{match}\nsql: {update}") @@ -158,6 +158,7 @@ def discover_null_affiliations(attribution,email): except Exception as e: session.log_activity('Info', f"Error encountered: {e}") session.log_activity('Info', f"Affiliation insertion failed for {email} ") + session.log_activity('Info', f"Offending query: {update} ") def discover_alias(email): diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py index 2acc440747..f47d800b82 100644 --- a/augur/tasks/github/detect_move/tasks.py +++ b/augur/tasks/github/detect_move/tasks.py @@ -6,7 +6,7 @@ @celery.task -def detect_github_repo_move(repo_git_identifiers : str) -> None: +def detect_github_repo_move(repo_git_identifiers : [str]) -> None: logger = logging.getLogger(detect_github_repo_move.__name__) logger.info(f"Starting repo_move operation with {repo_git_identifiers}") diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 78f356d915..cb8c175e91 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -15,31 +15,33 @@ @celery.task -def collect_events(repo_git: str): +def collect_events(repo_git_identifiers: [str]): logger = logging.getLogger(collect_events.__name__) - # define GithubTaskSession to handle insertions, and store oauth keys - with GithubTaskSession(logger) as session: - - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_id = repo_obj.repo_id - - owner, repo = get_owner_repo(repo_git) - - logger.info(f"Collecting Github events for {owner}/{repo}") - - url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" - - event_data = retrieve_all_event_data(repo_git, logger) - - if event_data: - - process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no events") + for repo_git in repo_git_identifiers: + # define GithubTaskSession to handle insertions, and store oauth keys + with GithubTaskSession(logger) as session: + + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id + + owner, repo = get_owner_repo(repo_git) + + logger.info(f"Collecting Github events for {owner}/{repo}") + + url = 
f"https://api.github.com/repos/{owner}/{repo}/issues/events" + + event_data = retrieve_all_event_data(repo_git, logger) + + if event_data: + + process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no events") def retrieve_all_event_data(repo_git: str, logger): diff --git a/augur/tasks/github/facade_github/contributor_interfaceable/contributor_interface.py b/augur/tasks/github/facade_github/contributor_interfaceable/contributor_interface.py index 0a5f4c658b..0169fb2d54 100644 --- a/augur/tasks/github/facade_github/contributor_interfaceable/contributor_interface.py +++ b/augur/tasks/github/facade_github/contributor_interfaceable/contributor_interface.py @@ -182,9 +182,9 @@ def insert_alias(session, contributor, email): session.logger.info( f"There are more than one contributors in the table with gh_user_id={contributor['gh_user_id']}") - session.logger.info(f"Creating alias for email: {email}") + #session.logger.info(f"Creating alias for email: {email}") - session.logger.info(f"{contributor_table_data} has type {type(contributor_table_data)}") + #session.logger.info(f"{contributor_table_data} has type {type(contributor_table_data)}") # Insert a new alias that corresponds to where the contributor was found # use the email of the new alias for canonical_email if the api returns NULL # TODO: It might be better to have the canonical_email allowed to be NUll because right now it has a null constraint. @@ -283,7 +283,7 @@ def fetch_username_from_email(session, commit): # Default to failed state login_json = None - session.logger.info(f"Here is the commit: {commit}") + #session.logger.info(f"Here is the commit: {commit}") # email = commit['email_raw'] if 'email_raw' in commit else commit['email_raw'] @@ -301,11 +301,12 @@ def fetch_username_from_email(session, commit): login_json = request_dict_from_endpoint(session, url, timeout_wait=30) - session.logger.info(f"email api url {url}") + # Check if the email result got anything, if it failed try a name search. 
if login_json is None or 'total_count' not in login_json or login_json['total_count'] == 0: session.logger.info( f"Could not resolve the username from {commit['email_raw']}") + session.logger.info(f"email api url {url}") # Go back to failure condition login_json = None @@ -377,7 +378,7 @@ def get_login_with_supplemental_data(session, commit_data): if item['score'] > match['score']: match = item - session.logger.info( + session.logger.debug( "When searching for a contributor, we found the following users: {}\n".format(match)) return match['login'] diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index ad9dae09f0..054ee91564 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -102,8 +102,6 @@ def process_commit_metadata(session,contributorQueue,repo_id): # Use the email found in the commit data if api data is NULL emailFromCommitData = contributor['email_raw'] if 'email_raw' in contributor else contributor['email'] - session.logger.info( - f"Successfully retrieved data from github for email: {emailFromCommitData}") # Get name from commit if not found by GitHub name_field = contributor['commit_name'] if 'commit_name' in contributor else contributor['name'] @@ -152,7 +150,7 @@ def process_commit_metadata(session,contributorQueue,repo_id): #"data_source": interface.data_source } - session.logger.info(f"{cntrb}") + #session.logger.info(f"{cntrb}") except Exception as e: session.logger.info(f"Error when trying to create cntrb: {e}") @@ -227,118 +225,118 @@ def link_commits_to_contributor(session,contributorQueue): # Update the contributors table from the data facade has gathered. @celery.task -def insert_facade_contributors(repo_id): +def insert_facade_contributors(repo_id_list): logger = logging.getLogger(insert_facade_contributors.__name__) #session = GithubTaskSession(logger) with GithubTaskSession(logger) as session: - session.logger.info( - "Beginning process to insert contributors from facade commits for repo w entry info: {}\n".format(repo_id)) + # Get all of the commit data's emails and names from the commit table that do not appear # in the contributors table or the contributors_aliases table. 
- new_contrib_sql = s.sql.text(""" + + for repo_id in repo_id_list: + + session.logger.info( + "Beginning process to insert contributors from facade commits for repo w entry info: {}\n".format(repo_id)) + new_contrib_sql = s.sql.text(""" + SELECT DISTINCT + commits.cmt_author_name AS NAME, + commits.cmt_commit_hash AS hash, + commits.cmt_author_raw_email AS email_raw, + 'not_unresolved' as resolution_status + FROM + commits + WHERE + commits.repo_id = :repo_id + AND (NOT EXISTS ( SELECT contributors.cntrb_canonical FROM contributors WHERE contributors.cntrb_canonical = commits.cmt_author_raw_email ) + or NOT EXISTS ( SELECT contributors_aliases.alias_email from contributors_aliases where contributors_aliases.alias_email = commits.cmt_author_raw_email) + AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name )) + GROUP BY + commits.cmt_author_name, + commits.cmt_commit_hash, + commits.cmt_author_raw_email + UNION + SELECT DISTINCT + commits.cmt_author_name AS NAME,--commits.cmt_id AS id, + commits.cmt_commit_hash AS hash, + commits.cmt_author_raw_email AS email_raw, + 'unresolved' as resolution_status + FROM + commits + WHERE + commits.repo_id = :repo_id + AND EXISTS ( SELECT unresolved_commit_emails.email FROM unresolved_commit_emails WHERE unresolved_commit_emails.email = commits.cmt_author_raw_email ) + AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name ) + GROUP BY + commits.cmt_author_name, + commits.cmt_commit_hash, + commits.cmt_author_raw_email + ORDER BY + hash + """).bindparams(repo_id=repo_id) + + #Execute statement with session. + result = session.execute_sql(new_contrib_sql).fetchall() + new_contribs = [dict(zip(row.keys(), row)) for row in result] + + #print(new_contribs) + + #json.loads(pd.read_sql(new_contrib_sql, self.db, params={ + # 'repo_id': repo_id}).to_json(orient="records")) + + + + process_commit_metadata(session,list(new_contribs),repo_id) + + session.logger.debug("DEBUG: Got through the new_contribs") + + + with FacadeSession(logger) as session: + + for repo_id in repo_id_list: + # sql query used to find corresponding cntrb_id's of emails found in the contributor's table + # i.e., if a contributor already exists, we use it! 
+ resolve_email_to_cntrb_id_sql = s.sql.text(""" SELECT DISTINCT - commits.cmt_author_name AS NAME, - commits.cmt_commit_hash AS hash, - commits.cmt_author_raw_email AS email_raw, - 'not_unresolved' as resolution_status + cntrb_id, + contributors.cntrb_login AS login, + contributors.cntrb_canonical AS email, + commits.cmt_author_raw_email FROM + contributors, commits WHERE - commits.repo_id = :repo_id - AND (NOT EXISTS ( SELECT contributors.cntrb_canonical FROM contributors WHERE contributors.cntrb_canonical = commits.cmt_author_raw_email ) - or NOT EXISTS ( SELECT contributors_aliases.alias_email from contributors_aliases where contributors_aliases.alias_email = commits.cmt_author_raw_email) - AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name )) - GROUP BY - commits.cmt_author_name, - commits.cmt_commit_hash, - commits.cmt_author_raw_email + contributors.cntrb_canonical = commits.cmt_author_raw_email + AND commits.repo_id = :repo_id UNION SELECT DISTINCT - commits.cmt_author_name AS NAME,--commits.cmt_id AS id, - commits.cmt_commit_hash AS hash, - commits.cmt_author_raw_email AS email_raw, - 'unresolved' as resolution_status + contributors_aliases.cntrb_id, + contributors.cntrb_login as login, + contributors_aliases.alias_email AS email, + commits.cmt_author_raw_email FROM + contributors, + contributors_aliases, commits WHERE - commits.repo_id = :repo_id - AND EXISTS ( SELECT unresolved_commit_emails.email FROM unresolved_commit_emails WHERE unresolved_commit_emails.email = commits.cmt_author_raw_email ) - AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name ) - GROUP BY - commits.cmt_author_name, - commits.cmt_commit_hash, - commits.cmt_author_raw_email - ORDER BY - hash - """).bindparams(repo_id=repo_id) - - #Execute statement with session. - result = session.execute_sql(new_contrib_sql).fetchall() - new_contribs = [dict(zip(row.keys(), row)) for row in result] - - #print(new_contribs) - - #json.loads(pd.read_sql(new_contrib_sql, self.db, params={ - # 'repo_id': repo_id}).to_json(orient="records")) - + contributors_aliases.alias_email = commits.cmt_author_raw_email + AND contributors.cntrb_id = contributors_aliases.cntrb_id + AND commits.repo_id = :repo_id + """).bindparams(repo_id=repo_id) - - process_commit_metadata(session,list(new_contribs),repo_id) - - session.logger.debug("DEBUG: Got through the new_contribs") + #self.logger.info("DEBUG: got passed the sql statement declaration") + # Get a list of dicts that contain the emails and cntrb_id's of commits that appear in the contributor's table. + #existing_cntrb_emails = json.loads(pd.read_sql(resolve_email_to_cntrb_id_sql, self.db, params={ + # 'repo_id': repo_id}).to_json(orient="records")) - with FacadeSession(logger) as session: - # sql query used to find corresponding cntrb_id's of emails found in the contributor's table - # i.e., if a contributor already exists, we use it! 
- resolve_email_to_cntrb_id_sql = s.sql.text(""" - SELECT DISTINCT - cntrb_id, - contributors.cntrb_login AS login, - contributors.cntrb_canonical AS email, - commits.cmt_author_raw_email - FROM - contributors, - commits - WHERE - contributors.cntrb_canonical = commits.cmt_author_raw_email - AND commits.repo_id = :repo_id - UNION - SELECT DISTINCT - contributors_aliases.cntrb_id, - contributors.cntrb_login as login, - contributors_aliases.alias_email AS email, - commits.cmt_author_raw_email - FROM - contributors, - contributors_aliases, - commits - WHERE - contributors_aliases.alias_email = commits.cmt_author_raw_email - AND contributors.cntrb_id = contributors_aliases.cntrb_id - AND commits.repo_id = :repo_id - """).bindparams(repo_id=repo_id) - - #self.logger.info("DEBUG: got passed the sql statement declaration") - # Get a list of dicts that contain the emails and cntrb_id's of commits that appear in the contributor's table. - #existing_cntrb_emails = json.loads(pd.read_sql(resolve_email_to_cntrb_id_sql, self.db, params={ - # 'repo_id': repo_id}).to_json(orient="records")) - - result = session.execute_sql(resolve_email_to_cntrb_id_sql).fetchall() - existing_cntrb_emails = [dict(zip(row.keys(), row)) for row in result] - - print(existing_cntrb_emails) - link_commits_to_contributor(session,list(existing_cntrb_emails)) - - session.logger.info("Done with inserting and updating facade contributors") - return - -@celery.task -def facade_grab_contribs(repo_id): - logger = logging.getLogger(facade_grab_contribs.__name__) - with FacadeSession(logger) as session: + result = session.execute_sql(resolve_email_to_cntrb_id_sql).fetchall() + existing_cntrb_emails = [dict(zip(row.keys(), row)) for row in result] - grab_committer_list(session,repo_id) + print(existing_cntrb_emails) + link_commits_to_contributor(session,list(existing_cntrb_emails)) + session.logger.info("Done with inserting and updating facade contributors") + return + diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index d75c86c279..83dbbb02bb 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -18,27 +18,29 @@ development = get_development_flag() @celery.task -def collect_issues(repo_git: str) -> None: +def collect_issues(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_issues.__name__) - owner, repo = get_owner_repo(repo_git) - # define GithubTaskSession to handle insertions, and store oauth keys - with GithubTaskSession(logger) as session: - - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_id = repo_obj.repo_id + for repo_git in repo_git_identifiers: + owner, repo = get_owner_repo(repo_git) + + # define GithubTaskSession to handle insertions, and store oauth keys + with GithubTaskSession(logger) as session: + + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id + + + issue_data = retrieve_all_issue_data(repo_git, logger) + + if issue_data: - - issue_data = retrieve_all_issue_data(repo_git, logger) - - if issue_data: - - process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) - - else: - logger.info(f"{owner}/{repo} has no issues") + process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no issues") def retrieve_all_issue_data(repo_git, logger) -> None: diff --git 
a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index 26a4769494..89ea3e1c6e 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -17,24 +17,25 @@ @celery.task -def collect_github_messages(repo_git: str) -> None: +def collect_github_messages(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_github_messages.__name__) - with GithubTaskSession(logger, engine) as session: - - repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id - - owner, repo = get_owner_repo(repo_git) - message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) - - if message_data: - - process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) - - else: - logger.info(f"{owner}/{repo} has no messages") + for repo_git in repo_git_identifiers: + with GithubTaskSession(logger, engine) as session: + + repo_id = session.query(Repo).filter( + Repo.repo_git == repo_git).one().repo_id + + owner, repo = get_owner_repo(repo_git) + message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) + + if message_data: + + process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no messages") def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: diff --git a/augur/tasks/github/pull_requests/commits_model/tasks.py b/augur/tasks/github/pull_requests/commits_model/tasks.py index 9a9c834e9c..e50ea9b4ea 100644 --- a/augur/tasks/github/pull_requests/commits_model/tasks.py +++ b/augur/tasks/github/pull_requests/commits_model/tasks.py @@ -7,14 +7,15 @@ @celery.task -def process_pull_request_commits(repo_git: str) -> None: +def process_pull_request_commits(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(process_pull_request_commits.__name__) - with GithubTaskSession(logger) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - pull_request_commits_model(repo.repo_id, logger) - except Exception as e: - logger.error(f"Could not complete pull_request_commits_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - raise e + for repo_git in repo_git_identifiers: + with GithubTaskSession(logger) as session: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + pull_request_commits_model(repo.repo_id, logger) + except Exception as e: + logger.error(f"Could not complete pull_request_commits_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + raise e diff --git a/augur/tasks/github/pull_requests/files_model/tasks.py b/augur/tasks/github/pull_requests/files_model/tasks.py index 6ed40811a9..fbe29795ac 100644 --- a/augur/tasks/github/pull_requests/files_model/tasks.py +++ b/augur/tasks/github/pull_requests/files_model/tasks.py @@ -6,14 +6,15 @@ from augur.application.db.util import execute_session_query @celery.task -def process_pull_request_files(repo_git: str) -> None: +def process_pull_request_files(repo_git_identifiers: str) -> None: logger = logging.getLogger(process_pull_request_files.__name__) - with GithubTaskSession(logger) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - pull_request_files_model(repo.repo_id, logger) - except Exception as e: - logger.error(f"Could not complete 
pull_request_files_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - #raise e \ No newline at end of file + for repo_git in repo_git_identifiers: + with GithubTaskSession(logger) as session: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + pull_request_files_model(repo.repo_id, logger) + except Exception as e: + logger.error(f"Could not complete pull_request_files_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + #raise e \ No newline at end of file diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 848a78f5b7..2b3383bcc7 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -17,22 +17,23 @@ @celery.task -def collect_pull_requests(repo_git: str) -> None: +def collect_pull_requests(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_pull_requests.__name__) - - with GithubTaskSession(logger, engine) as session: - - repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id - - owner, repo = get_owner_repo(repo_git) - pr_data = retrieve_all_pr_data(repo_git, logger) - - if pr_data: - process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no pull requests") + + for repo_git in repo_git_identifiers: + with GithubTaskSession(logger, engine) as session: + + repo_id = session.query(Repo).filter( + Repo.repo_git == repo_git).one().repo_id + + owner, repo = get_owner_repo(repo_git) + pr_data = retrieve_all_pr_data(repo_git, logger) + + if pr_data: + process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) + else: + logger.info(f"{owner}/{repo} has no pull requests") # TODO: Rename pull_request_reviewers table to pull_request_requested_reviewers diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index c4fdd96782..093a899a02 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -184,7 +184,7 @@ def releases_model(session, repo_git, repo_id): session.logger.info(f"Ran into problem when fetching data for repo {repo_git}: {e}") return - session.logger.info("repository value is: {}\n".format(data)) + #session.logger.info("repository value is: {}\n".format(data)) if 'releases' in data: if 'edges' in data['releases'] and data['releases']['edges']: for n in data['releases']['edges']: diff --git a/augur/tasks/github/repo_info/tasks.py b/augur/tasks/github/repo_info/tasks.py index 010b2114a0..c739cb49d0 100644 --- a/augur/tasks/github/repo_info/tasks.py +++ b/augur/tasks/github/repo_info/tasks.py @@ -5,16 +5,18 @@ import traceback @celery.task -def collect_repo_info(repo_git: str): +def collect_repo_info(repo_git_identifiers: [str]): logger = logging.getLogger(collect_repo_info.__name__) with GithubTaskSession(logger, engine) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - repo_info_model(session, repo) - except Exception as e: - session.logger.error(f"Could not add repo info for repo {repo.repo_id}\n Error: {e}") - session.logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) \ No newline at end of file + + for repo_git in repo_git_identifiers: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = 
execute_session_query(query, 'one') + try: + repo_info_model(session, repo) + except Exception as e: + session.logger.error(f"Could not add repo info for repo {repo.repo_id}\n Error: {e}") + session.logger.error( + ''.join(traceback.format_exception(None, e, e.__traceback__))) \ No newline at end of file diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index c3d29c4535..561b3005ce 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -35,6 +35,7 @@ #Predefine phases. For new phases edit this and the config to reflect. #The domain of tasks ran should be very explicit. +@celery.task def prelim_phase(): logger = logging.getLogger(prelim_phase.__name__) @@ -44,8 +45,12 @@ def prelim_phase(): repos = execute_session_query(query, 'all') repo_git_list = [repo.repo_git for repo in repos] - return create_grouped_task_load(dataList=repo_git_list,task=detect_github_repo_move) + result = create_grouped_task_load(dataList=repo_git_list,task=detect_github_repo_move).apply_async() + + with allow_join_result(): + return result.get() +@celery.task def repo_collect_phase(): logger = logging.getLogger(repo_collect_phase.__name__) @@ -57,25 +62,38 @@ def repo_collect_phase(): with DatabaseSession(logger) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') - #Just use list comprehension for simple group - repo_info_tasks = [collect_repo_info.si(repo.repo_git) for repo in repos] - for repo in repos: - first_tasks_repo = group(collect_issues.si(repo.repo_git),collect_pull_requests.si(repo.repo_git)) - second_tasks_repo = group(collect_events.si(repo.repo_git), - collect_github_messages.si(repo.repo_git),process_pull_request_files.si(repo.repo_git), process_pull_request_commits.si(repo.repo_git)) - repo_chain = chain(first_tasks_repo,second_tasks_repo) - issue_dependent_tasks.append(repo_chain) + all_repo_git_identifiers = [repo.repo_git for repo in repos] + + #Pool the tasks for collecting repo info. + repo_info_tasks = create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_repo_info).tasks + + #pool the repo collection jobs that should be ran first and have deps. + primary_repo_jobs = group( + *create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_issues).tasks, + *create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_pull_requests).tasks + ) + + secondary_repo_jobs = group( + *create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_events).tasks, + *create_grouped_task_load(dataList=all_repo_git_identifiers,task=collect_github_messages).tasks, + *create_grouped_task_load(dataList=all_repo_git_identifiers, task=process_pull_request_files).tasks, + *create_grouped_task_load(dataList=all_repo_git_identifiers, task=process_pull_request_commits).tasks + ) + repo_task_group = group( *repo_info_tasks, - chain(group(*issue_dependent_tasks),process_contributors.si()), + chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), generate_facade_chain(logger), collect_releases.si() ) - return chain(repo_task_group, refresh_materialized_views.si()) + result = chain(repo_task_group, refresh_materialized_views.si()).apply_async() + + with allow_join_result(): + return result.get() DEFINED_COLLECTION_PHASES = [prelim_phase, repo_collect_phase] @@ -130,7 +148,7 @@ def start_data_collection(self): #Add the phase to the sequence in order as a celery task. 
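The switch from `job()` to `job.si()` just below is the key fix in this hunk: calling a Celery task function directly executes its body immediately in the calling process, while `.si()` only builds an immutable signature that the chain can hand to a worker later. A small sketch of the difference, assuming an illustrative local Redis broker:

```python
from celery import Celery, chain

# Assumed broker/backend URLs for illustration; Augur derives its own from its Redis config.
app = Celery("example", broker="redis://localhost:6379/0", backend="redis://localhost:6379/1")

@app.task
def phase_one():
    return "phase one done"

@app.task
def phase_two(*args):
    return "phase two done"

# phase_one() would run the task body right here, in the calling process;
# phase_one.si() only builds an immutable signature, so nothing runs until a
# worker picks it up, and the previous task's result is not passed along
# (phase_one.s() would forward it as an argument).
workflow = chain(phase_one.si(), phase_two.si())
workflow.apply_async()
```
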
#The preliminary task creates the larger task chain - augur_collection_sequence.append(job()) + augur_collection_sequence.append(job.si()) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) diff --git a/augur/tasks/util/worker_util.py b/augur/tasks/util/worker_util.py index 8efac1db7b..427017143a 100644 --- a/augur/tasks/util/worker_util.py +++ b/augur/tasks/util/worker_util.py @@ -29,6 +29,8 @@ def create_grouped_task_load(*args,processes=8,dataList=[],task=None): return jobs + + def wait_child_tasks(ids_list): for task_id in ids_list: prereq = AsyncResult(str(task_id)) diff --git a/docs/new-install.md b/docs/new-install.md index d2226ce235..2c6586fff2 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -16,14 +16,7 @@ 1. Obtain a GitHub Access Token: https://github.com/settings/tokens 2. Obtain a GitLab Access Token: https://gitlab.com/-/profile/personal_access_tokens 3. Python3 needs to be installed, and typically is by default on most systems. -4. There are some Git configuration parameters that help when you are cloning repos over time, and a platform prompts you for credentials when it finds a repo is deleted: -```bash - git config --global diff.renames true - git config --global diff.renameLimit 200000 - git config --global credential.helper cache - git config --global credential.helper 'cache --timeout=9999999999999' -``` -5. Make sure all of your core libraries are installed at the operating system level. Often, these days, they are by default, and its important to make sure: +4. Make sure all of your core libraries are installed at the operating system level. Often, these days, they are by default, and its important to make sure: ```bash sudo apt update sudo apt upgrade @@ -32,11 +25,11 @@ sudo apt install postgresql postgresql-contrib postgresql-client sudo apt install build-essential ``` -6. If you are running on Ubuntu 22.x right now, you will need to install python 3.9 +5. If you are running on Ubuntu 22.x right now, you will need to install python 3.9 - `sudo add-apt-repository ppa:deadsnakes/ppa` - `sudo apt install python3.9` - `sudo apt install python3.9-distutils` -7. Install pip: `sudo apt install python3-pip` and `sudo apt install python3.9-venv` +6. Install pip: `sudo apt install python3-pip` and `sudo apt install python3.9-venv` ### Docker 1. Make sure docker, and docker-compose are both installed @@ -52,6 +45,10 @@ 0. Follow the installation instructions for the database here: https://oss-augur.readthedocs.io/en/main/quick-start.html#postgresql-installation 1. Clone Augur, or clone your fork of Augur if you wish to make contributions 2. Install `redis-server` at the operating system level `sudo apt install redis-server` +3. Install rabbitmq. Instructions for installing rabbitmq can be found here: https://www.rabbitmq.com/download.html + +NOTE: To set up the rabbitmq instance and get it working see the quick-start section before running make install + 3. Make sure you have `Go` version is 1.19.3. If you don't know how to install `Go`, instructions are provided during the installation process. After following the instructions, you will need to add Go to your path for this session: `export PATH=$PATH:/usr/local/go/bin`. You should also add this to your shell's profile script. 4. Create a Python Virtual Environment `python3 -m venv ~/virtual-env-directory` (use `python3.9 -m venv` if on Ubuntu 22.04, as it defaults to python 3.10, which will not compile the machine learning workers.) 5. 
Activate your Python Virtual Environment `source ~/virtual-env-directory/bin/activate` diff --git a/docs/source/docker/docker-compose.rst b/docs/source/docker/docker-compose.rst index 85367b5384..a429e5a498 100644 --- a/docs/source/docker/docker-compose.rst +++ b/docs/source/docker/docker-compose.rst @@ -19,61 +19,25 @@ The default ports for each service are\: TCP/IP connections on port 5432? -Docker Compose with the script (recommended) -============================================ -This section details how to use Augur's docker-setup script to get a docker-compose deployment up and running as fast as possible. - -Running the containers ------------------------ - -.. warning:: - - Don't forget to provide your external database credentials in the ``docker_env.txt`` file or generate it within the script. `More about the configuration file here `_ - -To run Augur - -.. code-block:: bash - - sudo ./docker-setup.sh - -Answer the prompts depending on your needs. If you are using a local database it is important to use 10.254.254.254 as a hostname or localhost if prompted. If you are using the container database or the test database press 2 or 3 for the prompt answer. - -The script should automatically generate the environment variables for the docker containers and compose files. Additionally, it will set up a network alias so that the containers can communicate with localhost. Finally, it also takes care of whether or not to generate the schema to protect the integrity of any databases in use. - - -.. warning:: - - It is also important to only generate the schema if you need to otherwise your database could become unusable later on. - -Stopping the containers -------------------------- - -To stop the containers, do a keyboard interrupt while the script is running ``Ctrl+C``. The script will then ask if you want to generate log files to look at later. - -If not using the script, the standard method of stopping the containers that you started should work such as ``docker stop`` or ``docker-compose down`` - -Once you've got your container up and running, checkout out `how to use them `_ - - -Docker Compose without a script +Docker Compose =============================== -This section of the documentation details how to use Augur's Docker Compose configuration to get the full stack up and running as fast as possible without the recommended helper script. +This section of the documentation details how to use Augur's Docker Compose configuration to get the full stack up and running. .. warning:: - Don't forget to provide your external database credentials in the ``docker_env.txt`` file. Additionally an ``.env`` file is needed for the ``*.yml`` files' environment variables. Don't forget to set the variables specified in these files namely ``AUGUR_DB_TYPE`` and ``AUGUR_DB_HOST``. + Don't forget to provide your external database credentials in a file called ``.env`` file. Make sure the following environment variables are specified. + Don't specify AUGUR_DB if you want the docker database to be used. - Example docker_env.txt: + Example .env: .. 
code:: - AUGUR_GITHUB_API_KEY=your_key_here - AUGUR_DB_SCHEMA_BUILD=0 - AUGUR_DB_HOST=xx.xxx.xxx.xxx - AUGUR_DB_NAME=augur - AUGUR_DB_PORT=5432 - AUGUR_DB_USER=augur - AUGUR_DB_PASSWORD=somePassword + AUGUR_GITHUB_API_KEY=xxxxxxxxxxxxxxxxxxxxxxx + AUGUR_GITHUB_USERNAME=usernameGithub + AUGUR_GITLAB_API_KEY=xxxxxxxxxxxxxxxxxxxxxxx + AUGUR_GITLAB_USERNAME=usernameGitlab + AUGUR_DB=yourDBString + To run Augur **without** the database container: diff --git a/docs/source/docker/getting-started.rst b/docs/source/docker/getting-started.rst index 1d2a95c393..69c3ef59bd 100644 --- a/docs/source/docker/getting-started.rst +++ b/docs/source/docker/getting-started.rst @@ -61,17 +61,15 @@ Now that you've got your external database credentials (if you are using one) an Your database credentials and other environment variables used at runtime are stored in a file when running manually and are taken from the active bash session when using docker-compose. -You can provide your own ``docker_env.txt`` to pull from. The file should have the below format and set all the variables to some value. +You can provide your own ``.env`` file to pull from. The file should have the below format and set all the variables to some value. .. code:: - AUGUR_GITHUB_API_KEY=xxxxxxxxxxxxxxxxxxx - AUGUR_GITHUB_USERNAME=ExampleUser - AUGUR_GITLAB_API_KEY=xxxxxxxxxxxxxxxxxxx - AUGUR_GITLAB_USERNAME=ExampleUser - AUGUR_DB=postgresql://xxxx:xxxxxxxx@yourhost:5432/yourdb - REDIS_CONN_STRING=redis://yourhost:6379 - AUGUR_DB_SCHEMA_BUILD=0 + AUGUR_GITHUB_API_KEY=xxxxxxxxxxxxxxxxxxxxxxx + AUGUR_GITHUB_USERNAME=usernameGithub + AUGUR_GITLAB_API_KEY=xxxxxxxxxxxxxxxxxxxxxxx + AUGUR_GITLAB_USERNAME=usernameGitlab + AUGUR_DB=yourDBString Now that you've created your config file or are ready to generate it yourself, you're ready to `get going `_ . diff --git a/docs/source/docker/quick-start.rst b/docs/source/docker/quick-start.rst index 32146b1d4b..b271fe3dc4 100644 --- a/docs/source/docker/quick-start.rst +++ b/docs/source/docker/quick-start.rst @@ -27,29 +27,23 @@ Before you get off to such a quick start, go ahead and CREATE USER augur WITH ENCRYPTED PASSWORD 'password'; GRANT ALL PRIVILEGES ON DATABASE augur TO augur; - 2. Make sure you have an instance of redis running somewhere an external machine can access. On linux you can do this manually like this: - .. code-block:: bash - - redis-server --protected-mode no - - 3. Install Docker. If you're not familiar with Docker, their `starting guide `_ is a great resource. + 2. Install Docker and docker-compose. If you're not familiar with Docker, their `starting guide `_ is a great resource. - 4. Create a file to store all relevant enviroment variables for running docker. Below is an example file. + 3. Create a file to store all relevant enviroment variables for running docker. Below is an example file. This file should be named ``.env``` .. code-block:: - AUGUR_GITHUB_API_KEY=xxxxxxxxxxxxxxxxxxx - AUGUR_GITHUB_USERNAME=ExampleUser - AUGUR_GITLAB_API_KEY=xxxxxxxxxxxxxxxxxxx - AUGUR_GITLAB_USERNAME=ExampleUser - AUGUR_DB=postgresql://xxxx:xxxxxxxx@yourhost:5432/yourdb - REDIS_CONN_STRING=redis://yourhost:6379 + AUGUR_GITHUB_API_KEY=xxxxxxxxxxxxxxxxxxxxxxx + AUGUR_GITHUB_USERNAME=usernameGithub + AUGUR_GITLAB_API_KEY=xxxxxxxxxxxxxxxxxxxxxxx + AUGUR_GITLAB_USERNAME=usernameGitlab + AUGUR_DB=yourDBString 4. Execute the code from the base directory of the Augur repository: .. code-block:: bash sudo docker build -t augur-docker -f docker/backend/Dockerfile . 
- sudo docker run --name augur --env-file --add-host host.docker.internal:host-gateway -t augur-docker + sudo docker-compose up diff --git a/docs/source/getting-started/collecting-data.rst b/docs/source/getting-started/collecting-data.rst index 190c27b2f2..ea7778427b 100644 --- a/docs/source/getting-started/collecting-data.rst +++ b/docs/source/getting-started/collecting-data.rst @@ -1,7 +1,7 @@ Collecting data =============== -Now that you’ve installed Augur’s application server, it’s time to configure your data collection workers. If you just want to run Augur using the one repository in the default database, and default worker settings, all you need to do is start the redis server in one terminal, the celery worker in another terminal, and the augur application in the other terminal. (Don't forget that the AUGUR_DB needs to be set in the 2nd and 3rd terminal windows, or set permanently) +Now that you’ve installed Augur’s application server, it’s time to configure your data collection workers. If you just want to run Augur using the one repository in the default database, and default worker settings, all you need to do is start the redis server in one terminal, make sure rabbitmq is running, and the augur application in the other terminal. (Don't forget that the AUGUR_DB environment variable needs to be set in the terminal, or set permanently) .. code-block:: bash From 8a2ed64e0e2d8dc2028646b4ce7c5324474dd009 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sun, 15 Jan 2023 13:00:53 -0600 Subject: [PATCH 011/134] Update dev with main updates (#2113) * Add bash in the container (#2112) The entrypoint.sh script use bash, running with podman return Jan 09 10:12:24 augur.osci.io podman[2004482]: /usr/bin/env: 'bash': No such file or directory Signed-off-by: Michael Scherer Signed-off-by: Michael Scherer * updated CONTRIBUTING.md (#2111) Signed-off-by: WhiteWolf47 Signed-off-by: WhiteWolf47 * my first commit Signed-off-by: Glowreeyah * Hardcode bash location Signed-off-by: Michael Scherer * Update README.md * Update metadata.py Signed-off-by: Michael Scherer Signed-off-by: WhiteWolf47 Signed-off-by: Glowreeyah Co-authored-by: mscherer Co-authored-by: ANURAG BHANDARI Co-authored-by: Glowreeyah Co-authored-by: Michael Scherer --- CONTRIBUTING.md | 2 +- README.md | 4 +- docker/backend/Dockerfile | 1 + docker/backend/entrypoint.sh | 4 +- docs/source/schema/regularly_used_data.rst | 164 ++++++++++++++++----- metadata.py | 4 +- 6 files changed, 136 insertions(+), 43 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 64d2da8f5b..8b1e07b609 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,7 +18,7 @@ $ cd augur/ $ git remote add upstream https://github.com/chaoss/augur.git ``` -2. Follow the [development installation instructions](https://oss-augur.readthedocs.io/en/main/development-guide/installation.html). +2. Follow the [development installation instructions](https://github.com/chaoss/augur/blob/main/docs/new-install.md). 3. Create a new branch ```bash diff --git a/README.md b/README.md index 74070da6aa..cb28127a7b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Augur NEW Release v0.43.7 +# Augur NEW Release v0.43.8 [![first-timers-only](https://img.shields.io/badge/first--timers--only-friendly-blue.svg?style=flat-square)](https://www.firsttimersonly.com/) We follow the [First Timers Only](https://www.firsttimersonly.com/) philosophy of tagging issues for first timers only, and walking one newcomer through the resolution process weekly. 
[You can find these issues tagged with "first timers only" on our issues list.](https://github.com/chaoss/augur/labels/first-timers-only). [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) [![Build Docker images](https://github.com/chaoss/augur/actions/workflows/build_docker.yml/badge.svg)](https://github.com/chaoss/augur/actions/workflows/build_docker.yml) [![Hits-of-Code](https://hitsofcode.com/github/chaoss/augur?branch=main)](https://hitsofcode.com/github/chaoss/augur/view?branch=main) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/2788/badge)](https://bestpractices.coreinfrastructure.org/projects/2788) @@ -6,7 +6,7 @@ ## NEW RELEASE ALERT! [If you want to jump right in, updated docker build/compose and bare metal installation instructions are available here](docs/new-install.md) -Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.43.7 +Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.43.8 - The `main` branch is a stable version of our new architecture, which features: - Dramatic improvement in the speed of large scale data collection (10,000+ repos). All data is obtained for 10k+ repos within a week - A new job management architecture that uses Celery and Redis to manage queues, and enables users to run a Flower job monitoring dashboard diff --git a/docker/backend/Dockerfile b/docker/backend/Dockerfile index cc22cb5fb7..2b62c9eaba 100644 --- a/docker/backend/Dockerfile +++ b/docker/backend/Dockerfile @@ -10,6 +10,7 @@ RUN set -x \ && apt-get update \ && apt-get -y install --no-install-recommends \ git \ + bash \ curl \ gcc \ python3-pip \ diff --git a/docker/backend/entrypoint.sh b/docker/backend/entrypoint.sh index b207c08eec..54d994a0ef 100644 --- a/docker/backend/entrypoint.sh +++ b/docker/backend/entrypoint.sh @@ -1,4 +1,4 @@ -#!/usr/bin/env bash +#!/bin/bash #SPDX-License-Identifier: MIT set -e @@ -43,4 +43,4 @@ if [[ -f /repos.csv ]]; then augur db add-repos /repos.csv fi -exec augur backend start \ No newline at end of file +exec augur backend start diff --git a/docs/source/schema/regularly_used_data.rst b/docs/source/schema/regularly_used_data.rst index 5eb9208e2d..7e6f504b53 100644 --- a/docs/source/schema/regularly_used_data.rst +++ b/docs/source/schema/regularly_used_data.rst @@ -3,14 +3,20 @@ List of Regularly Used Data Tables In Augur **This is a list of data tables in augur that are regularly used and the various workers attached to them.** - **Commits** - This is where a record for every file in every commit in every repository in an Augur instance is kept. +Commits +------- + + This is where a record for every file in every commit in every repository in an Augur instance is kept. * Worker: Facade worker collects, and also stores platform user information in the commits table. .. image:: images/commits.png :width: 200 - **Contributor_affiliations** : A list of emails and domains, with start and end dates for individuals to have an organizational affiliation. +Contributor_affiliations +------------------------ + + A list of emails and domains, with start and end dates for individuals to have an organizational affiliation. 
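A rough sketch of the kind of lookup this table supports, with made-up rows and helper names rather than Augur's actual schema or code:

```python
from datetime import date
from typing import Optional

# Made-up rows standing in for contributor_affiliations entries.
affiliations = [
    {"domain": "example-corp.com", "affiliation": "Example Corp",
     "start": date(2015, 1, 1), "end": date(2021, 7, 1)},
    {"domain": "example-corp.com", "affiliation": "Example Corp (alumni)",
     "start": date(2021, 7, 1), "end": None},
]

def resolve_affiliation(email: str, when: date) -> Optional[str]:
    domain = email.split("@")[-1].lower()
    for row in affiliations:
        active = row["start"] <= when and (row["end"] is None or when < row["end"])
        if row["domain"] == domain and active:
            return row["affiliation"]
    return None

print(resolve_affiliation("dev@example-corp.com", date(2020, 3, 1)))  # -> Example Corp
```
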
* Populated by default when augur is installed * Can be edited so that an Augur instance can resolve a larger list of affiliations. @@ -19,15 +25,21 @@ List of Regularly Used Data Tables In Augur .. image:: images/contributor_affiliations.png :width: 200 - **Contributor_repo** - Storage of a snowball sample of all the repositories anyone in your schema has accessed on GitHub. So, for example, if you wanted to know all the repositories that people on your project contributed to, this would be the table. +Contributor_repo +---------------- + + Storage of a snowball sample of all the repositories anyone in your schema has accessed on GitHub. So, for example, if you wanted to know all the repositories that people on your project contributed to, this would be the table. - * *Contributor_breadth_worker* populates this table + * Contributor_breadth_worker populates this table * Population of this table happens last, and can take a long time. .. image:: images/contributor_repo.png :width: 200 - **Contributors** - These are all the contributors to a project/repo. In Augur, all types of contributions create a contributor record. This includes issue comments, pull request comments, label addition, etc. This is different than how GitHub counts contributors; they only include committers. +Contributors +------------ + +These are all the contributors to a project/repo. In Augur, all types of contributions create a contributor record. This includes issue comments, pull request comments, label addition, etc. This is different than how GitHub counts contributors; they only include committers. * Workers Adding Contributors: @@ -40,7 +52,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/contributors.png :width: 200 - **Contributors_aliases** - These are all the alternate emails that the same contributor might use. These records arise almost entirely from the commit log. For example, if I have two different emails on two different computers that I use when I make a commit, then an alias is created for whatever the 2nd to nth email Augur runs across. If a user’s email cannot be resolved, it is placed in the unresolved_commit_emails table. Coverage is greater than 98% since Augur 1.2.4. +Contributors_aliases +-------------------- + + These are all the alternate emails that the same contributor might use. These records arise almost entirely from the commit log. For example, if I have two different emails on two different computers that I use when I make a commit, then an alias is created for whatever the 2nd to nth email Augur runs across. If a user’s email cannot be resolved, it is placed in the unresolved_commit_emails table. Coverage is greater than 98% since Augur 1.2.4. * Worker: @@ -49,7 +64,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/contributors_aliases.png :width: 200 - **Discourse_insights** - There are nine specific discourse act types identified by the computational linguistic algorithm that underlies the discourse insights worker. This worker analyzes each comment on each issue or pull request sequentially so that context is applied when determining the discourse act type. These types are: +Discourse_insights +------------------ + +There are nine specific discourse act types identified by the computational linguistic algorithm that underlies the discourse insights worker. This worker analyzes each comment on each issue or pull request sequentially so that context is applied when determining the discourse act type. 
These types are: * negative-reaction * answer @@ -68,8 +86,8 @@ List of Regularly Used Data Tables In Augur .. image:: images/discourse_insights.png :width: 200 - **issue_assignees || issue_events || issue_labels** - +issue_assignees || issue_events || issue_labels +---------------------------------------------- * Worker: * Github or Gitlab Issues Worker @@ -77,7 +95,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/issue_assignees.png :width: 200 - **issue_message_ref** - A link between the issue and each message stored in the message table. +issue_message_ref +----------------- + + A link between the issue and each message stored in the message table. * Worker: @@ -86,7 +107,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/issue_message_ref.png :width: 200 - **issues** - Is all the data related to a GitHub Issue. +issues +------ + + Is all the data related to a GitHub Issue. * Worker: @@ -95,12 +119,18 @@ List of Regularly Used Data Tables In Augur .. image:: images/issues.png :width: 200 - **Message** - every pull request or issue related message. These are then mapped back to either pull requests, or issues, using the __msg_ref tables +Message +------- + + Every pull request or issue related message. These are then mapped back to either pull requests, or issues, using the __msg_ref tables .. image:: images/message.png :width: 200 - **Message_analysis:** Two factors evaluated for every pull request on issues message: What is the sentiment of the message (positive or negative), and what is the novelty of the message in the context of other messages in that repository. +Message_analysis +---------------- + + Two factors evaluated for every pull request on issues message: What is the sentiment of the message (positive or negative), and what is the novelty of the message in the context of other messages in that repository. * Worker: @@ -109,7 +139,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/message_analysis.png :width: 200 - **Message_analysis_summary:** A summary level representation of the granular data in message_analysis. +Message_analysis_summary +---------------------- + + A summary level representation of the granular data in message_analysis. * Worker: @@ -118,7 +151,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/message_analysis_summary.png :width: 200 - **Platform:** Reference data with two rows: one for GitHub, one for GitLab. +Platform +--------- + + Reference data with two rows: one for GitHub, one for GitLab. * Worker: @@ -127,7 +163,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/platform.png :width: 200 - **Pull_request_analysis:** A representation of the probability of a pull request being merged into a repository, based on analysis of the properties of previously merged pull requests in a repository. (Machine learning worker) +Pull_request_analysis +--------------------- + + A representation of the probability of a pull request being merged into a repository, based on analysis of the properties of previously merged pull requests in a repository. (Machine learning worker) * Worker: @@ -136,7 +175,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/pull_request_analysis.png :width: 200 - **pull_request_assignees || pull_request_commits || pull_request_events || pull_request_files || pull_request_labels || pull_request_message_ref** - All the data related to pull requests. Every pull request will be in the pull_requests data. 
+pull_request_assignees || pull_request_commits || pull_request_events || pull_request_files || pull_request_labels || pull_request_message_ref +------------------------------------------------------------- + + All the data related to pull requests. Every pull request will be in the pull_requests data. .. image:: images/pull_request_assignees.png :width: 200 @@ -156,7 +198,9 @@ List of Regularly Used Data Tables In Augur .. image:: images/pull_request_ref.png :width: 200 - **pull_request_meta || pull_request_repo || pull_request_review_message_ref || pull_request_reviewers || pull_request_reviews || pull_request_teams || pull_requests** - All the data related to pull requests. Every pull request will be in the pull_requests data. +pull_request_meta || pull_request_repo || pull_request_review_message_ref || pull_request_reviewers || pull_request_reviews || pull_request_teams || pull_requests +------------------------- + All the data related to pull requests. Every pull request will be in the pull_requests data. .. image:: images/pull_request.png :width: 200 @@ -179,7 +223,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/pull_request_teams.png :width: 200 - **Releases:** Github declared software releases or release tags. For example: https://github.com/chaoss/augur/releases +Releases +-------- + + Github declared software releases or release tags. For example: https://github.com/chaoss/augur/releases * Worker: @@ -188,12 +235,18 @@ List of Regularly Used Data Tables In Augur .. image:: images/releases.png :width: 200 - **Repo:** A list of all the repositories. +Repo +---- + + A list of all the repositories. .. image:: images/repo.png :width: 200 - **Repo_badging:** A list of CNCF badging information for a project. Reads this api endpoint: https://bestpractices.coreinfrastructure.org/projects.json +Repo_badging +------------ + + A list of CNCF badging information for a project. Reads this api endpoint: https://bestpractices.coreinfrastructure.org/projects.json * Worker: @@ -202,7 +255,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_badging.png :width: 200 - **Repo_cluster_messages:** Identifying which messages and repositories are clustered together. Identifies project similarity based on communication patterns. +Repo_cluster_messages +--------------------- + + Identifying which messages and repositories are clustered together. Identifies project similarity based on communication patterns. * Worker: @@ -211,7 +267,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_cluster_messages.png :width: 200 - **Repo_dependencies:** enumerates every dependency, including dependencies that are not package managed. +Repo_dependencies +----------------- + + Enumerates every dependency, including dependencies that are not package managed. * Worker: @@ -220,7 +279,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_dependencies.png :width: 200 - **Repo_deps_libyear:** (enumerates every package managed dependency) Looks up the latest release of any library that is imported into a project. Then it compares that release date, the release version of the library version in your project (and its release date), and calculates how old your version is, compared to the latest version. The resulting statistic is “libyear”. This worker runs at least once a month, so over time, you will see if your libraries are being kept up to date, or not. 
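The libyear statistic described here is easiest to see with a small worked example; the release dates below are invented purely for illustration:

```python
from datetime import date

# Invented release dates, purely for illustration.
used_release_date = date(2021, 3, 14)     # release date of the version the project pins
latest_release_date = date(2022, 11, 2)   # release date of the newest available version

libyear = (latest_release_date - used_release_date).days / 365.25
print(f"libyear: {libyear:.2f}")  # -> libyear: 1.64, i.e. about 1.6 years behind
```

A project that upgrades promptly keeps this number near zero; if the library keeps releasing while the pinned version stays put, the number grows.
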
+Repo_deps_libyear +----------------- + + (enumerates every package managed dependency) Looks up the latest release of any library that is imported into a project. Then it compares that release date, the release version of the library version in your project (and its release date), and calculates how old your version is, compared to the latest version. The resulting statistic is “libyear”. This worker runs at least once a month, so over time, you will see if your libraries are being kept up to date, or not. * Scenarios: * If a library is updated, but you didn’t change your version, the libyear statistic gets larger @@ -233,7 +295,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_deps_libyear.png :width: 200 - **Repo_deps_scorecard:** Runs the OSSF Scorecard over every repository ( https://github.com/ossf/scorecard ) : There are 16 factors that are explained at that repository location. +Repo_deps_scorecard +--------------- + + Runs the OSSF Scorecard over every repository ( https://github.com/ossf/scorecard ) : There are 16 factors that are explained at that repository location. * Worker: @@ -242,12 +307,18 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_deps_scorecard.png :width: 200 - **Repo_groups:** reference data. The repo groups in an augur instance. +Repo_groups +--------- + + Reference data. The repo groups in an augur instance. .. image:: images/repo_groups.png :width: 200 - **Repo_info:** this worker gathers metadata from the platform API that includes things like “number of stars”, “number of forks”, etc. AND it also gives us : Number of issues, number of pull requests, etc. .. THAT information we use to determine if we have collected all of the PRs and Issues associated with a repository. +Repo_info +--------- + + This worker gathers metadata from the platform API that includes things like “number of stars”, “number of forks”, etc. AND it also gives us : Number of issues, number of pull requests, etc. .. THAT information we use to determine if we have collected all of the PRs and Issues associated with a repository. * Worker: @@ -256,7 +327,8 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_info.png :width: 200 - **Repo_insights:** +Repo_insights +----------- * Worker: @@ -265,7 +337,8 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_insights.png :width: 200 - **Repo_insights_records:** +Repo_insights_records +---------- * Worker: @@ -274,7 +347,8 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_insights_records.png :width: 200 - **Repo_labor** +Repo_labor +-------- * Worker: @@ -283,22 +357,34 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_labor.png :width: 200 - **Repo_meta:** Exists to capture repo data that may be useful in the future. Not currently populated. +Repo_meta +--------- + + Exists to capture repo data that may be useful in the future. Not currently populated. .. image:: images/repo_meta.png :width: 200 - **Repo_sbom_scans:** This table links the augur_data schema to the augur_spdx schema to keep a list of repositories that need licenses scanned. (These are for file level license declarations, which are common in Linux Foundation projects, but otherwise not in wide use). +Repo_sbom_scans +----------- + + This table links the augur_data schema to the augur_spdx schema to keep a list of repositories that need licenses scanned. 
(These are for file level license declarations, which are common in Linux Foundation projects, but otherwise not in wide use). .. image:: images/repo_sbom_scans.png :width: 200 - **Repo_stats:** Exists to capture repo data that may be useful in the future. Not currently populated. +Repo_stats +--------- + + Exists to capture repo data that may be useful in the future. Not currently populated. .. image:: images/repo_stats.png :width: 200 - **Repo_topic:** Identifies probable topics of conversation in discussion threads around issues and pull requests. +Repo_topic +------- + + Identifies probable topics of conversation in discussion threads around issues and pull requests. * Worker: @@ -307,7 +393,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/repo_topic.png :width: 200 - **Topic_words:** Unigrams, bigrams, and trigrams associated with topics in the repo_topic table. +Topic_words +--------- + + Unigrams, bigrams, and trigrams associated with topics in the repo_topic table. * Worker: @@ -316,7 +405,10 @@ List of Regularly Used Data Tables In Augur .. image:: images/topic_words.png :width: 200 - **Unresolved_commit_emails** - emails from commits that were not initially able to be resolved using automated mechanisms. +Unresolved_commit_emails +----------------------- + + Emails from commits that were not initially able to be resolved using automated mechanisms. * Worker: diff --git a/metadata.py b/metadata.py index 77aaf727de..7cdba7a8a1 100644 --- a/metadata.py +++ b/metadata.py @@ -5,8 +5,8 @@ __short_description__ = "Python 3 package for free/libre and open-source software community metrics, models & data collection" -__version__ = "0.43.7" -__release__ = "v0.43.7 (Eve 22)" +__version__ = "0.43.8" +__release__ = "v0.43.8 (Love Shack 23)" __license__ = "MIT" __copyright__ = "University of Missouri, University of Nebraska-Omaha, CHAOSS, Brian Warner & Augurlabs 2023" From a978b4f42d0dfc07f985ad56d418696e7fe5554d Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Wed, 18 Jan 2023 18:53:45 -0600 Subject: [PATCH 012/134] Add lots of frontend functionality (#2127) * Add user group functionality to repo load controller Signed-off-by: Andrew Brain * Add user group table Signed-off-by: Andrew Brain * Changes for user groups Signed-off-by: Andrew Brain * Start working on converting old dbs to new version Signed-off-by: Andrew Brain * Add script to upgrade database Signed-off-by: Andrew Brain * Fix up downgrade and upgrade script Signed-off-by: Andrew Brain * Remove prints from script Signed-off-by: Andrew Brain * Fixes to repo insertion methods Signed-off-by: Andrew Brain * First run of adding repos to groups Signed-off-by: Andrew Brain * Match the group id data types Signed-off-by: Andrew Brain * Major improvements to user group functionality Signed-off-by: Andrew Brain * Pass more repo load controller tests Signed-off-by: Andrew Brain * Move around tests for readability Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests to repo load controller Signed-off-by: Andrew Brain * Add more tests to repo load controller Signed-off-by: Andrew Brain * Fix deleting user errors Signed-off-by: Andrew Brain * Small fixes to user endpoints Signed-off-by: Andrew Brain * Add more tests for 
coverage Signed-off-by: Andrew Brain * Add more endpoints to get the group and repo data for the frontend Signed-off-by: Andrew Brain * Add documentation and update User endpoints: - Make ancillary arguments optional for group_repos - Add documentation clarifying new repo group endpoints * Add docs and a few fixes Signed-off-by: Andrew Brain * More oauth work * Add auth to endpoints Signed-off-by: Andrew Brain * Remove unneeded file Signed-off-by: Andrew Brain * Initial integration and testing * Convert augur view login logic to the user orm model Signed-off-by: Andrew Brain * Outline user methods on the user orm class Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Update routes to use orm functions Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Fix some login bugs Signed-off-by: Andrew Brain * Add function to paginate all repos, user repos and group repos Signed-off-by: Andrew Brain * A functions to paginate user, group and all repos Signed-off-by: Andrew Brain * Fix syntax error Signed-off-by: Andrew Brain * Fixes Signed-off-by: Andrew Brain * Fix various bugs Signed-off-by: Andrew Brain * Remove prints Signed-off-by: Andrew Brain * Make json endpoints only work when logged in Signed-off-by: Andrew Brain * Return error if user is not logged in when using the api Signed-off-by: Andrew Brain * Add more function to orm Signed-off-by: Andrew Brain * Integration work * Further integration testing and stability improvements Signed-off-by: Ulincsys <28362836a@gmail.com> * Imporove error logging and fix error when loading a user or group repos Signed-off-by: Andrew Brain * Fix small errors in user login and improve logging Signed-off-by: Andrew Brain * Fix user deletion and improve logging when there are no valid github api keys Signed-off-by: Andrew Brain * Fix get repo by id in repo model Signed-off-by: Andrew Brain * Stability improvements Signed-off-by: Ulincsys <28362836a@gmail.com> * further improvements Signed-off-by: Ulincsys <28362836a@gmail.com> * Track templates directory Signed-off-by: Ulincsys <28362836a@gmail.com> * Make default group allowed, and return user group exists if it does Signed-off-by: Andrew Brain * Fix errors in the api Signed-off-by: Andrew Brain * User function improvements Signed-off-by: Andrew Brain * Remove print Signed-off-by: Andrew Brain * Add database changes and fixes to the api Signed-off-by: Andrew Brain * Added refresh endpoint Signed-off-by: Ulincsys * Add code for refresh tokens Signed-off-by: Andrew Brain * Update auth requirements Signed-off-by: Ulincsys * Implement group favorite functionality Signed-off-by: Andrew Brain * Make session tokens expire Signed-off-by: Andrew Brain * Fix error in unathorized handler Signed-off-by: Andrew Brain * Fix refresh endpoint response Signed-off-by: Ulincsys * Fix conflicts Signed-off-by: Andrew Brain * Add reverted changes Signed-off-by: Ulincsys * Fix schema revisions Signed-off-by: Andrew Brain * Add pointer class to star, implement API toggle call Signed-off-by: Ulincsys * Fix error where frontend user recieved json Signed-off-by: Andrew Brain * Add fixes for oauth Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Signed-off-by: Ulincsys <28362836a@gmail.com> Signed-off-by: Ulincsys Co-authored-by: Ulincsys <28362836a@gmail.com> Co-authored-by: Ulincsys Co-authored-by: Sean Goggins --- .gitignore | 2 + augur/api/routes/__init__.py | 1 + augur/api/routes/batch.py | 2 +- 
augur/api/routes/collection_status.py | 2 +- augur/api/routes/config.py | 8 +- augur/api/routes/contributor_reports.py | 2 +- augur/api/routes/manager.py | 2 +- augur/api/routes/metadata.py | 2 +- augur/api/routes/nonstandard_metrics.py | 2 +- augur/api/routes/pull_request_reports.py | 2 +- augur/api/routes/user.py | 473 +++++++++--- augur/api/routes/util.py | 2 +- augur/api/server.py | 14 +- augur/api/util.py | 28 +- augur/api/view/.gitignore | 1 + augur/api/view/api.py | 137 ++++ augur/api/view/augur_view.py | 103 +++ augur/api/view/init.py | 147 ++++ augur/api/view/routes.py | 321 ++++++++ augur/api/view/run.sh | 11 + augur/api/view/server/Environment.py | 52 ++ augur/api/view/server/LoginException.py | 3 + augur/api/view/server/ServerThread.py | 35 + augur/api/view/server/User.py | 276 +++++++ augur/api/view/server/__init__.py | 4 + augur/api/view/url_converters.py | 27 + augur/api/view/utils.py | 439 +++++++++++ augur/application/__init__.py | 13 + augur/application/cli/backend.py | 7 +- augur/application/config.py | 5 + augur/application/db/models/__init__.py | 8 +- augur/application/db/models/augur_data.py | 21 + .../application/db/models/augur_operations.py | 527 ++++++++++++- augur/application/db/session.py | 3 +- augur/application/db/util.py | 4 +- .../versions/3_oauth_and_user_groups.py | 231 ++++++ augur/application/util.py | 29 + augur/static/css/dashboard.css | 65 ++ augur/static/css/first_time.css | 148 ++++ augur/static/css/stylesheet.css | 453 +++++++++++ .../static/favicon/android-chrome-192x192.png | Bin 0 -> 5679 bytes .../static/favicon/android-chrome-512x512.png | Bin 0 -> 12926 bytes augur/static/favicon/apple-touch-icon.png | Bin 0 -> 5236 bytes augur/static/favicon/favicon-16x16.png | Bin 0 -> 389 bytes augur/static/favicon/favicon-32x32.png | Bin 0 -> 649 bytes augur/static/favicon/favicon.ico | Bin 0 -> 15406 bytes augur/static/favicon/favicon.png | Bin 0 -> 14195 bytes augur/static/favicon/favicon_source.svg | 78 ++ augur/static/favicon/site.webmanifest | 1 + augur/static/img/Chaoss_Logo.png | Bin 0 -> 19432 bytes augur/static/img/Chaoss_Logo_white.png | Bin 0 -> 21153 bytes augur/static/img/auggie_shrug.png | Bin 0 -> 29646 bytes augur/static/img/augur_logo.png | Bin 0 -> 35590 bytes augur/static/img/augur_logo_black.png | Bin 0 -> 42763 bytes augur/static/img/notification-icon.svg | 80 ++ augur/static/js/range.js | 3 + augur/static/js/sleep.js | 4 + augur/static/js/textarea_resize.js | 12 + .../github/util/github_api_key_handler.py | 15 +- .../github/util/github_random_key_auth.py | 16 +- augur/templates/admin-dashboard.j2 | 178 +++++ augur/templates/authorization.j2 | 52 ++ augur/templates/first-time.j2 | 211 +++++ augur/templates/groups-table.j2 | 27 + augur/templates/index.j2 | 67 ++ augur/templates/loading.j2 | 14 + augur/templates/login.j2 | 155 ++++ augur/templates/navbar.j2 | 67 ++ augur/templates/new_settings.j2 | 347 +++++++++ augur/templates/notice.j2 | 6 + augur/templates/notifications.j2 | 79 ++ augur/templates/repo-commits.j2 | 0 augur/templates/repo-info.j2 | 128 +++ augur/templates/repos-card.j2 | 30 + augur/templates/repos-table.j2 | 95 +++ augur/templates/settings.j2 | 439 +++++++++++ augur/templates/settings_old.j2 | 140 ++++ augur/templates/status.j2 | 233 ++++++ augur/templates/toasts.j2 | 60 ++ augur/templates/user-group-repos-table.j2 | 113 +++ augur/util/repo_load_controller.py | 528 ++++++++++--- setup.py | 3 +- .../test_repo_load_controller/helper.py | 163 ++++ .../test_adding_orgs.py | 148 ++++ .../test_adding_repos.py | 233 ++++++ 
.../test_helper_functions.py | 726 ++++++++++++++++++ .../test_repo_load_controller/util.py | 146 ++++ 87 files changed, 7948 insertions(+), 261 deletions(-) create mode 100644 augur/api/view/.gitignore create mode 100644 augur/api/view/api.py create mode 100644 augur/api/view/augur_view.py create mode 100644 augur/api/view/init.py create mode 100644 augur/api/view/routes.py create mode 100755 augur/api/view/run.sh create mode 100644 augur/api/view/server/Environment.py create mode 100644 augur/api/view/server/LoginException.py create mode 100644 augur/api/view/server/ServerThread.py create mode 100644 augur/api/view/server/User.py create mode 100644 augur/api/view/server/__init__.py create mode 100644 augur/api/view/url_converters.py create mode 100644 augur/api/view/utils.py create mode 100644 augur/application/schema/alembic/versions/3_oauth_and_user_groups.py create mode 100644 augur/application/util.py create mode 100644 augur/static/css/dashboard.css create mode 100644 augur/static/css/first_time.css create mode 100644 augur/static/css/stylesheet.css create mode 100644 augur/static/favicon/android-chrome-192x192.png create mode 100644 augur/static/favicon/android-chrome-512x512.png create mode 100644 augur/static/favicon/apple-touch-icon.png create mode 100644 augur/static/favicon/favicon-16x16.png create mode 100644 augur/static/favicon/favicon-32x32.png create mode 100644 augur/static/favicon/favicon.ico create mode 100644 augur/static/favicon/favicon.png create mode 100644 augur/static/favicon/favicon_source.svg create mode 100644 augur/static/favicon/site.webmanifest create mode 100644 augur/static/img/Chaoss_Logo.png create mode 100644 augur/static/img/Chaoss_Logo_white.png create mode 100644 augur/static/img/auggie_shrug.png create mode 100644 augur/static/img/augur_logo.png create mode 100644 augur/static/img/augur_logo_black.png create mode 100644 augur/static/img/notification-icon.svg create mode 100644 augur/static/js/range.js create mode 100644 augur/static/js/sleep.js create mode 100644 augur/static/js/textarea_resize.js create mode 100644 augur/templates/admin-dashboard.j2 create mode 100644 augur/templates/authorization.j2 create mode 100644 augur/templates/first-time.j2 create mode 100644 augur/templates/groups-table.j2 create mode 100644 augur/templates/index.j2 create mode 100644 augur/templates/loading.j2 create mode 100644 augur/templates/login.j2 create mode 100644 augur/templates/navbar.j2 create mode 100644 augur/templates/new_settings.j2 create mode 100644 augur/templates/notice.j2 create mode 100644 augur/templates/notifications.j2 create mode 100644 augur/templates/repo-commits.j2 create mode 100644 augur/templates/repo-info.j2 create mode 100644 augur/templates/repos-card.j2 create mode 100644 augur/templates/repos-table.j2 create mode 100644 augur/templates/settings.j2 create mode 100644 augur/templates/settings_old.j2 create mode 100644 augur/templates/status.j2 create mode 100644 augur/templates/toasts.j2 create mode 100644 augur/templates/user-group-repos-table.j2 create mode 100644 tests/test_applicaton/test_repo_load_controller/helper.py create mode 100644 tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py create mode 100644 tests/test_applicaton/test_repo_load_controller/test_adding_repos.py create mode 100644 tests/test_applicaton/test_repo_load_controller/test_helper_functions.py create mode 100644 tests/test_applicaton/test_repo_load_controller/util.py diff --git a/.gitignore b/.gitignore index 887b67269c..7ebccb6a15 100644 --- 
a/.gitignore +++ b/.gitignore @@ -7,6 +7,8 @@ augur_export_env.sh .DS_Store *.config.json !docker.config.json +config.yml +reports.yml node_modules/ diff --git a/augur/api/routes/__init__.py b/augur/api/routes/__init__.py index e69de29bb2..f4cc69cb4e 100644 --- a/augur/api/routes/__init__.py +++ b/augur/api/routes/__init__.py @@ -0,0 +1 @@ +AUGUR_API_VERSION = 'api/unstable' diff --git a/augur/api/routes/batch.py b/augur/api/routes/batch.py index a967fb4f64..bb08bbc5a1 100644 --- a/augur/api/routes/batch.py +++ b/augur/api/routes/batch.py @@ -12,7 +12,7 @@ from augur.api.util import metric_metadata import json -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION logger = logging.getLogger(__name__) diff --git a/augur/api/routes/collection_status.py b/augur/api/routes/collection_status.py index fb8ea0f318..49c62e2d76 100644 --- a/augur/api/routes/collection_status.py +++ b/augur/api/routes/collection_status.py @@ -4,7 +4,7 @@ import json from flask import Response -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION def create_routes(server): diff --git a/augur/api/routes/config.py b/augur/api/routes/config.py index 7a3d7f7014..08bb92d06b 100644 --- a/augur/api/routes/config.py +++ b/augur/api/routes/config.py @@ -17,7 +17,7 @@ logger = logging.getLogger(__name__) development = get_development_flag() -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION def generate_upgrade_request(): # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/426 @@ -28,12 +28,6 @@ def generate_upgrade_request(): return response, 426 def create_routes(server): - - @server.app.errorhandler(405) - def unsupported_method(error): - return jsonify({"status": "Unsupported method"}), 405 - - @server.app.route(f"/{AUGUR_API_VERSION}/config/get", methods=['GET', 'POST']) def get_config(): if not development and not request.is_secure: diff --git a/augur/api/routes/contributor_reports.py b/augur/api/routes/contributor_reports.py index 7425695825..0d599e6acf 100644 --- a/augur/api/routes/contributor_reports.py +++ b/augur/api/routes/contributor_reports.py @@ -18,7 +18,7 @@ from bokeh.layouts import gridplot from bokeh.transform import cumsum -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION warnings.filterwarnings('ignore') diff --git a/augur/api/routes/manager.py b/augur/api/routes/manager.py index 7624322a02..fcb5524663 100755 --- a/augur/api/routes/manager.py +++ b/augur/api/routes/manager.py @@ -17,7 +17,7 @@ import os import traceback -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION logger = logging.getLogger(__name__) diff --git a/augur/api/routes/metadata.py b/augur/api/routes/metadata.py index 7a0f1ff20a..8d4cad3c5a 100644 --- a/augur/api/routes/metadata.py +++ b/augur/api/routes/metadata.py @@ -12,7 +12,7 @@ import os import requests -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION def create_routes(server): diff --git a/augur/api/routes/nonstandard_metrics.py b/augur/api/routes/nonstandard_metrics.py index d57ce50a10..71ac2ff13a 100644 --- a/augur/api/routes/nonstandard_metrics.py +++ b/augur/api/routes/nonstandard_metrics.py @@ -11,7 +11,7 @@ # from augur.api.server import transform from augur.api.server import server -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION def create_routes(server): diff --git a/augur/api/routes/pull_request_reports.py 
b/augur/api/routes/pull_request_reports.py index 1cfa33c9a2..b130e403a2 100644 --- a/augur/api/routes/pull_request_reports.py +++ b/augur/api/routes/pull_request_reports.py @@ -23,7 +23,7 @@ warnings.filterwarnings('ignore') -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION def create_routes(server): def pull_request_data_collection(repo_id, start_date, end_date): diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 62539be357..3f82003471 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -1,29 +1,67 @@ #SPDX-License-Identifier: MIT """ -Creates routes for user login functionality +Creates routes for user functionality """ import logging import requests import json import os -from flask import request, Response, jsonify +import base64 +import time +import secrets +import pandas as pd +from flask import request, Response, jsonify, session +from flask_login import login_user, logout_user, current_user, login_required from werkzeug.security import generate_password_hash, check_password_hash from sqlalchemy.sql import text from sqlalchemy.orm import sessionmaker +from sqlalchemy.orm.exc import NoResultFound from augur.application.db.session import DatabaseSession from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.util.repo_load_controller import RepoLoadController +from augur.api.util import get_bearer_token +from augur.api.util import get_client_token - -from augur.application.db.models import User, UserRepo +from augur.application.db.models import User, UserRepo, UserGroup, UserSessionToken, ClientApplication, RefreshToken from augur.application.config import get_development_flag +from augur.tasks.init.redis_connection import redis_connection as redis + logger = logging.getLogger(__name__) development = get_development_flag() from augur.application.db.engine import create_database_engine Session = sessionmaker(bind=create_database_engine()) -AUGUR_API_VERSION = 'api/unstable' +from augur.api.routes import AUGUR_API_VERSION + +def api_key_required(fun): + # TODO Optionally rate-limit non authenticated users instead of rejecting requests + def wrapper(*args, **kwargs): + + client_token = get_client_token() + + # If valid: + if client_token: + + session = Session() + try: + kwargs["application"] = session.query(ClientApplication).filter(ClientApplication.api_key == client_token).one() + return fun(*args, **kwargs) + except NoResultFound: + pass + + return {"status": "Unauthorized client"} + + wrapper.__name__ = fun.__name__ + return wrapper + +# usage: +""" +@app.route("/path") +@api_key_required +def priviledged_function(): + stuff +""" # TODO This should probably be available to all endpoints def generate_upgrade_request(): @@ -35,11 +73,6 @@ def generate_upgrade_request(): return response, 426 def create_routes(server): - # TODO This functionality isn't specific to the User endpoints, and should be moved - @server.app.errorhandler(405) - def unsupported_method(error): - return jsonify({"status": "Unsupported method"}), 405 - @server.app.route(f"/{AUGUR_API_VERSION}/user/validate", methods=['POST']) def validate_user(): if not development and not request.is_secure: @@ -53,36 +86,127 @@ def validate_user(): return jsonify({"status": "Missing argument"}), 400 user = session.query(User).filter(User.login_name == username).first() - checkPassword = check_password_hash(user.login_hashword, password) if user is None: return jsonify({"status": "Invalid username"}) + + checkPassword = 
check_password_hash(user.login_hashword, password) if checkPassword == False: return jsonify({"status": "Invalid password"}) + + login_user(user) + return jsonify({"status": "Validated"}) + + @server.app.route(f"/{AUGUR_API_VERSION}/user/logout", methods=['POST']) + @login_required + def logout_user_func(): + if not development and not request.is_secure: + return generate_upgrade_request() + + if logout_user(): + return jsonify({"status": "Logged out"}) + + return jsonify({"status": "Error when logging out"}) + + + @server.app.route(f"/{AUGUR_API_VERSION}/user/authorize", methods=['POST', 'GET']) + @login_required + def user_authorize(): + code = secrets.token_hex() + username = current_user.login_name + + redis.set(code, username, ex=300) + + return jsonify({"status": "Validated", "code": code}) + + @server.app.route(f"/{AUGUR_API_VERSION}/user/session/generate", methods=['POST']) + @api_key_required + def generate_session(application): + code = request.args.get("code") + if not code: + return jsonify({"status": "Missing argument: code"}) + + if request.args.get("grant_type") != "code": + return jsonify({"status": "Invalid grant type"}) + + username = redis.get(code) + redis.delete(code) + if not username: + return jsonify({"status": "Invalid authorization code"}) + + user = User.get_user(username) + if not user: + return jsonify({"status": "Invalid user"}) + + seconds_to_expire = 86400 + + with DatabaseSession(logger) as session: + + existing_session = session.query(UserSessionToken).filter(UserSessionToken.user_id == user.user_id, UserSessionToken.application_id == application.id).first() + if existing_session: + existing_session.delete_refresh_tokens(session) + + + + user_session_token = UserSessionToken.create(user.user_id, application.id, seconds_to_expire).token + refresh_token = RefreshToken.create(user_session_token) + + response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) + response.headers["Cache-Control"] = "no-store" + + return response + + @server.app.route(f"/{AUGUR_API_VERSION}/user/session/refresh", methods=["GET", "POST"]) + @api_key_required + def refresh_session(application): + refresh_token_str = request.args.get("refresh_token") + + if not refresh_token_str: + return jsonify({"status": "Invalid refresh token"}) + + if request.args.get("grant_type") != "refresh_token": + return jsonify({"status": "Invalid grant type"}) + + session = Session() + refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() + if not refresh_token: + return jsonify({"status": "Invalid refresh token"}) + + if refresh_token.user_session.application == application: + return jsonify({"status": "Applications do not match"}) + + user_session = refresh_token.user_session + user = user_session.user + + new_user_session = UserSessionToken.create(user.user_id, user_session.application.id) + new_refresh_token = RefreshToken.create(new_user_session.token) + + session.delete(refresh_token) + session.delete(user_session) + session.commit() + + return jsonify({"status": "Validated", "refresh_token": new_refresh_token.id, "access_token": new_user_session.token, "expires": 86400}) + @server.app.route(f"/{AUGUR_API_VERSION}/user/query", methods=['POST']) def query_user(): if not development and not request.is_secure: return generate_upgrade_request() - session = Session() username = request.args.get("username") if username is None: - # 
https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400 return jsonify({"status": "Missing argument"}), 400 - user = session.query(User).filter(User.login_name == username).first() - - if user is None: + + if not User.exists(username): return jsonify({"status": "Invalid username"}) return jsonify({"status": True}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/create", methods=['POST']) + @server.app.route(f"/{AUGUR_API_VERSION}/user/create", methods=['GET', 'POST']) def create_user(): if not development and not request.is_secure: return generate_upgrade_request() - session = Session() username = request.args.get("username") password = request.args.get("password") email = request.args.get("email") @@ -90,80 +214,42 @@ def create_user(): last_name = request.args.get("last_name") admin = request.args.get("create_admin") or False - if username is None or password is None or email is None: - # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400 - return jsonify({"status": "Missing argument"}), 400 - user = session.query(User).filter(User.login_name == username).first() - if user is not None: - return jsonify({"status": "User already exists"}) - emailCheck = session.query(User).filter(User.email == email).first() - if emailCheck is not None: - return jsonify({"status": "Email already exists"}) - try: - user = User(login_name = username, login_hashword = generate_password_hash(password), email = email, first_name = first_name, last_name = last_name, tool_source="User API", tool_version=None, data_source="API", admin=False) - session.add(user) - session.commit() - return jsonify({"status": "User created"}) - except AssertionError as exception_message: - return jsonify(msg='Error: {}. '.format(exception_message)), 400 + result = User.create_user(username, password, email, first_name, last_name, admin) + + return jsonify(result[1]) + @server.app.route(f"/{AUGUR_API_VERSION}/user/remove", methods=['POST', 'DELETE']) + @login_required def delete_user(): if not development and not request.is_secure: return generate_upgrade_request() - session = Session() - username = request.args.get("username") - if username is None: - return jsonify({"status": "Missing argument"}), 400 - - user = session.query(User).filter(User.login_name == username).first() - - if user is None: - return jsonify({"status": "User does not exist"}) + status = current_user.delete() + return jsonify(status) - user_repos = session.query(UserRepo).filter(UserRepo.user_id == user.user_id).all() - for repo in user_repos: - session.delete(repo) - - session.delete(user) - session.commit() - return jsonify({"status": "User deleted"}), 200 @server.app.route(f"/{AUGUR_API_VERSION}/user/update", methods=['POST']) + @login_required def update_user(): if not development and not request.is_secure: return generate_upgrade_request() - session = Session() - username = request.args.get("username") - password = request.args.get("password") email = request.args.get("email") new_login_name = request.args.get("new_username") new_password = request.args.get("new_password") - if username is None or password is None: - return jsonify({"status": "Missing argument"}), 400 - - user = session.query(User).filter(User.login_name == username).first() - if user is None: - return jsonify({"status": "User does not exist"}) - - checkPassword = check_password_hash(user.login_hashword, password) - if checkPassword == False: - return jsonify({"status": "Invalid password"}) - if email is not None: existing_user = session.query(User).filter(User.email == 
email).one() if existing_user is not None: return jsonify({"status": "Already an account with this email"}) - user.email = email + current_user.email = email session.commit() return jsonify({"status": "Email Updated"}) if new_password is not None: - user.login_hashword = generate_password_hash(new_password) + current_user.login_hashword = generate_password_hash(new_password) session.commit() return jsonify({"status": "Password Updated"}) @@ -172,79 +258,252 @@ def update_user(): if existing_user is not None: return jsonify({"status": "Username already taken"}) - user.login_name = new_login_name + current_user.login_name = new_login_name session.commit() return jsonify({"status": "Username Updated"}) return jsonify({"status": "Missing argument"}), 400 - @server.app.route(f"/{AUGUR_API_VERSION}/user/repos", methods=['GET', 'POST']) - def user_repos(): + + @server.app.route(f"/{AUGUR_API_VERSION}/user/repo/add", methods=['GET', 'POST']) + @login_required + def add_user_repo(): if not development and not request.is_secure: return generate_upgrade_request() - username = request.args.get("username") + repo = request.args.get("repo_url") + group_name = request.args.get("group_name") - with DatabaseSession(logger) as session: - - if username is None: - return jsonify({"status": "Missing argument"}), 400 - user = session.query(User).filter(User.login_name == username).first() - if user is None: - return jsonify({"status": "User does not exist"}) - - repo_load_controller = RepoLoadController(gh_session=session) + result = current_user.add_repo(group_name, repo) - repo_ids = repo_load_controller.get_user_repo_ids(user.user_id) + return jsonify(result[1]) - return jsonify({"status": "success", "repo_ids": repo_ids}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/add_repo", methods=['GET', 'POST']) - def add_user_repo(): + @server.app.route(f"/{AUGUR_API_VERSION}/user/group/add", methods=['GET', 'POST']) + @login_required + def add_user_group(): if not development and not request.is_secure: return generate_upgrade_request() - username = request.args.get("username") - repo = request.args.get("repo_url") + group_name = request.args.get("group_name") - with GithubTaskSession(logger) as session: + result = current_user.add_group(group_name) - if username is None: - return jsonify({"status": "Missing argument"}), 400 - user = session.query(User).filter( - User.login_name == username).first() - if user is None: - return jsonify({"status": "User does not exist"}) + return jsonify(result[1]) - repo_load_controller = RepoLoadController(gh_session=session) + @server.app.route(f"/{AUGUR_API_VERSION}/user/group/remove", methods=['GET', 'POST']) + @login_required + def remove_user_group(): + if not development and not request.is_secure: + return generate_upgrade_request() - result = repo_load_controller.add_frontend_repo(repo, user.user_id) + group_name = request.args.get("group_name") - return jsonify(result) + result = current_user.remove_group(group_name) + return jsonify(result[1]) - @server.app.route(f"/{AUGUR_API_VERSION}/user/add_org", methods=['GET', 'POST']) + + @server.app.route(f"/{AUGUR_API_VERSION}/user/org/add", methods=['GET', 'POST']) + @login_required def add_user_org(): if not development and not request.is_secure: return generate_upgrade_request() - username = request.args.get("username") org = request.args.get("org_url") + group_name = request.args.get("group_name") + + result = current_user.add_org(group_name, org) + + return jsonify(result[1]) + + + 
@server.app.route(f"/{AUGUR_API_VERSION}/user/repo/remove", methods=['GET', 'POST']) + @login_required + def remove_user_repo(): + if not development and not request.is_secure: + return generate_upgrade_request() + + + group_name = request.args.get("group_name") + + try: + repo_id = int(request.args.get("repo_id")) + except TypeError: + return {"status": "Repo id must be and integer"} + + result = current_user.remove_repo(group_name, repo_id) + + return jsonify(result[1]) + + @server.app.route(f"/{AUGUR_API_VERSION}/user/group/repos/", methods=['GET', 'POST']) + @login_required + def group_repos(): + """Select repos from a user group by name + + Arguments + ---------- + group_name : str + The name of the group to select + page : int = 0 -> [>= 0] + The page offset to use for pagination (optional) + page_size : int = 25 -> [> 0] + The number of result per page (optional) + sort : str + The name of the column to sort the data by (optional) + direction : str = "ASC" -> ["ASC" | "DESC"] + The direction to be used for sorting (optional) + + Returns + ------- + list + A list of dictionaries containing repos which match the given arguments + """ + + if not development and not request.is_secure: + return generate_upgrade_request() + + group_name = request.args.get("group_name") + page = request.args.get("page") or 0 + page_size = request.args.get("page_size") or 25 + sort = request.args.get("sort") or "repo_id" + direction = request.args.get("direction") or "ASC" + + result = current_user.get_group_repos(group_name, page, page_size, sort, direction) + + + result_dict = result[1] + if result[0] is not None: + + for repo in result[0]: + repo["base64_url"] = str(repo["base64_url"].decode()) + + result_dict.update({"repos": result[0]}) - with GithubTaskSession(logger) as session: + return jsonify(result_dict) - if username is None: - return jsonify({"status": "Missing argument"}), 400 - user = session.query(User).filter( - User.login_name == username).first() - if user is None: - return jsonify({"status": "User does not exist"}) + @server.app.route(f"/{AUGUR_API_VERSION}/user/group/repos/count", methods=['GET', 'POST']) + @login_required + def group_repo_count(): + """Count repos from a user group by name - repo_load_controller = RepoLoadController(gh_session=session) + Arguments + ---------- + username : str + The username of the user making the request + group_name : str + The name of the group to select - result = repo_load_controller.add_frontend_org(org, user.user_id) + Returns + ------- + int + A count of the repos in the given user group + """ - return jsonify(result) + if not development and not request.is_secure: + return generate_upgrade_request() + + group_name = request.args.get("group_name") + + result = current_user.get_group_repo_count(group_name) + + result_dict = result[1] + if result[0] is not None: + result_dict.update({"repo_count": result[0]}) + + return jsonify(result_dict) + + @server.app.route(f"/{AUGUR_API_VERSION}/user/groups/names", methods=['GET', 'POST']) + @login_required + def get_user_groups(): + """Get a list of user groups by username + + Arguments + ---------- + username : str + The username of the user making the request + + Returns + ------- + list + A list of group names associated with the given username + """ + + if not development and not request.is_secure: + return generate_upgrade_request() + + result = current_user.get_group_names() + + return jsonify({"status": "success", "group_names": result[0]}) + + 
@server.app.route(f"/{AUGUR_API_VERSION}/user/groups/repos/ids", methods=['GET', 'POST']) + @login_required + def get_user_groups_and_repos(): + """Get a list of user groups and their repos + + Returns + ------- + list + A list with this strucutre : [{"": List[str]: diff --git a/augur/api/util.py b/augur/api/util.py index 9beaf9e9cd..622932dfb5 100644 --- a/augur/api/util.py +++ b/augur/api/util.py @@ -9,6 +9,8 @@ import sys import beaker +from flask import request + __ROOT = os.path.abspath(os.path.dirname(__file__)) def get_data_path(path): """ @@ -73,4 +75,28 @@ def decorate(function): function.metadata.update(metadata) return function - return decorate \ No newline at end of file + return decorate + +""" + Extract authorization token by type from request header +""" +def get_token(token_type): + auth = request.headers.get("Authorization") + if auth: + tokens = auth.split(",") + for token in tokens: + if f"{token_type} " in token: + return token.replace(f"{token_type}", "").strip() + +""" + Extract Bearer token from request header +""" +def get_bearer_token(): + return get_token("Bearer") + +""" + Extract Client token from request header +""" +def get_client_token(): + return get_token("Client") + \ No newline at end of file diff --git a/augur/api/view/.gitignore b/augur/api/view/.gitignore new file mode 100644 index 0000000000..ad30bfec28 --- /dev/null +++ b/augur/api/view/.gitignore @@ -0,0 +1 @@ +*.yml \ No newline at end of file diff --git a/augur/api/view/api.py b/augur/api/view/api.py new file mode 100644 index 0000000000..2b9b2f5dd2 --- /dev/null +++ b/augur/api/view/api.py @@ -0,0 +1,137 @@ +from flask import Flask, render_template, render_template_string, request, abort, jsonify, redirect, url_for, session, flash +from flask_login import current_user, login_required +from augur.util.repo_load_controller import parse_org_url, parse_repo_url +from .utils import * + +def create_routes(server): + @server.app.route('/cache/file/') + @server.app.route('/cache/file/') + def cache(file=None): + if file is None: + return redirect(url_for('root', path=getSetting('caching'))) + return redirect(url_for('root', path=toCacheFilepath(file))) + + @server.app.route('/account/repos/add', methods = ['POST']) + @login_required + def av_add_user_repo(): + url = request.form.get("url") + group = request.form.get("group_name") + + if group == "None": + group = current_user.login_name + "_default" + + if not url or not group: + flash("Repo or org URL must not be empty") + elif parse_org_url(url): + current_user.add_org(group, url) + flash("Successfully added org") + elif parse_repo_url(url): + current_user.add_repo(group, url) + flash("Successfully added repo") + else: + flash("Invalid repo or org url") + + return redirect(url_for("user_settings") + "?section=tracker") + + @server.app.route('/account/update', methods = ['POST']) + @login_required + def user_update_password(): + old_password = request.form.get("password") + new_password = request.form.get("new_password") + + if current_user.update_password(old_password, new_password): + flash(f"Account {current_user.login_name} successfully updated") + else: + flash("An error occurred updating the account") + + return redirect(url_for("user_settings")) + + @server.app.route('/account/group/add', methods = ['POST']) + @login_required + def user_add_group(): + group = request.form.get("group_name") + + if not group: + flash("No group name provided") + elif current_user.add_group(group): + flash(f"Successfully added group {group}") + else: + flash("An 
error occurred adding group") + + return redirect(url_for("user_settings") + "?section=tracker") + + @server.app.route('/account/group/remove') + @login_required + def user_remove_group(): + group = request.args.get("group_name") + + if not group: + flash("No group name provided") + elif current_user.remove_group(group): + flash(f"Successfully removed group {group}") + else: + flash("An error occurred removing group") + + return redirect(url_for("user_settings") + "?section=tracker") + + @server.app.route('/account/repo/remove') + @login_required + def user_remove_repo(): + group = request.args.get("group_name") + repo = request.args.get("repo_id") + + if not repo: + flash("No repo id provided") + if not group: + flash("No group name provided") + + repo = int(repo) + + + if current_user.remove_repo(group, repo)[0]: + flash(f"Successfully removed repo {repo} from group {group}") + else: + flash("An error occurred removing repo from group") + + return redirect(url_for("user_group_view") + f"?group={group}") + + @server.app.route('/account/application/deauthorize') + @login_required + def user_app_deauthorize(): + token = request.args.get("token") + + if not token: + flash("No application provided") + elif current_user.invalidate_session(token): + flash("Successfully deauthorized application") + else: + flash("Invalid application token") + + return redirect(url_for("user_settings") + "?section=application") + + @server.app.route('/account/application/create', methods = ['POST']) + @login_required + def user_app_create(): + name = request.form.get("app_name") + url = request.form.get("app_url") + + if not name or not url: + flash("Must provide app name and redirect URL") + elif current_user.add_app(name, url): + flash("Successfully created app") + else: + flash("Could not create app") + + return redirect(url_for("user_settings") + "?section=application") + + + """ ---------------------------------------------------------------- + Locking request loop: + This route will lock the current request until the + report request completes. A json response is guaranteed. + Assumes that the requested repo exists. 
+ """ + @server.app.route('/requests/report/wait/') + def wait_for_report_request(id): + requestReports(id) + return jsonify(report_requests[id]) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py new file mode 100644 index 0000000000..584088f56e --- /dev/null +++ b/augur/api/view/augur_view.py @@ -0,0 +1,103 @@ +from flask import Flask, render_template, redirect, url_for, session, request, jsonify +from flask_login import LoginManager +from .utils import * +from .url_converters import * +from .init import logger + +# from .server import User +from augur.application.db.models import User, UserSessionToken +from augur.application.db.session import DatabaseSession +from augur.api.routes import AUGUR_API_VERSION +from augur.api.util import get_bearer_token + +import time + +login_manager = LoginManager() + +def create_routes(server): + + login_manager.init_app(server.app) + + server.app.secret_key = getSetting("session_key") + + server.app.url_map.converters['list'] = ListConverter + server.app.url_map.converters['bool'] = BoolConverter + server.app.url_map.converters['json'] = JSONConverter + + # Code 404 response page, for pages not found + @server.app.errorhandler(404) + def page_not_found(error): + if AUGUR_API_VERSION in str(request.url_rule): + return jsonify({"status": "Not Found"}), 404 + + return render_template('index.j2', title='404', api_url=getSetting('serving')), 404 + + @server.app.errorhandler(405) + def unsupported_method(error): + + if AUGUR_API_VERSION in str(request.url_rule): + return jsonify({"status": "Unsupported method"}), 405 + + return render_message("405 - Method not supported", "The resource you are trying to access does not support the request method used"), 405 + + @login_manager.unauthorized_handler + def unauthorized(): + + if AUGUR_API_VERSION in str(request.url_rule): + + with DatabaseSession(logger) as db_session: + + token_str = get_bearer_token() + token = db_session.query(UserSessionToken).filter(UserSessionToken.token == token_str).first() + if not token: + return jsonify({"status": "Session expired"}) + + return jsonify({"status": "Login required"}) + + session["login_next"] = url_for(request.endpoint, **request.args) + return redirect(url_for('user_login')) + + @login_manager.user_loader + def load_user(user_id): + + user = User.get_user(user_id) + + if not user: + return None + + # The flask_login library sets a unique session["_id"] + # when login_user() is called successfully + if session.get("_id") is not None: + + user._is_authenticated = True + user._is_active = True + + return user + + @login_manager.request_loader + def load_user_request(request): + + print(f"Current time of user request: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}") + token = get_bearer_token() + print(f"Bearer token: {token}") + + with DatabaseSession(logger) as session: + + current_time = int(time.time()) + token = session.query(UserSessionToken).filter(UserSessionToken.token == token, UserSessionToken.expiration >= current_time).first() + print(f"Token: {token}") + if token: + + print("Valid user") + + user = token.user + user._is_authenticated = True + user._is_active = True + + return user + + return None + + @server.app.template_filter('as_datetime') + def as_datetime(seconds): + time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(seconds)) \ No newline at end of file diff --git a/augur/api/view/init.py b/augur/api/view/init.py new file mode 100644 index 0000000000..fd98e12338 --- /dev/null +++ b/augur/api/view/init.py @@ -0,0 
+1,147 @@ +from pathlib import Path +from .server import Environment +import logging, sqlite3, secrets, hashlib, yaml + +env = Environment() + +# load configuration files and initialize globals +configFile = Path(env.setdefault("CONFIG_LOCATION", "config.yml")) + +version = {"major": 0, "minor": 0.1, "series": "Alpha"} + +report_requests = {} +settings = {} + +def init_settings(): + global settings + settings["approot"] = "/" + settings["caching"] = "static/cache/" + settings["cache_expiry"] = 604800 + settings["serving"] = "http://augur.chaoss.io/api/unstable" + settings["pagination_offset"] = 25 + settings["reports"] = "reports.yml" + settings["session_key"] = secrets.token_hex() + settings["version"] = version + +def write_settings(current_settings): + current_settings["caching"] = str(current_settings["caching"]) + + if "valid" in current_settings: + current_settings.pop("valid") + + with open(configFile, 'w') as file: + yaml.dump(current_settings, file) + +""" ---------------------------------------------------------------- +""" +def version_check(current_settings): + def to_version_string(version_object): + if version_object is None: + return "Undefined_version" + return f'{version_object["major"]}-{version_object["minor"]}-{version_object["series"]}' + + def update_from(old): + if old == None: + if "pagination_offset" not in current_settings: + current_settings["pagination_offset"] = current_settings.pop("paginationOffset") + if "session_key" not in current_settings: + current_settings["session_key"] = secrets.token_hex() + + else: + raise ValueError(f"Updating from {to_version_string(old)} to {to_version_string(version)} is unsupported") + + current_settings["version"] = version + write_settings(current_settings) + logging.info(f"Configuration updated from {to_version_string(old)} to {to_version_string(version)}") + + def compare_versions(old, new): + if old["major"] < new["major"]: + return -1, old["series"] == new["series"] + elif old["major"] > new["major"]: + return 1, old["series"] == new["series"] + elif old["minor"] < new["minor"]: + return -1, old["series"] == new["series"] + elif old["minor"] > new["minor"]: + return 1, old["series"] == new["series"] + return 0, old["series"] == new["series"] + + if "version" not in current_settings: + update_from(None) + + version_diff = compare_versions(current_settings["version"], version) + + if current_settings["version"] == version: + return + elif version_diff[0] == -1: + update_from(current_settings["version"]) + elif version_diff[0] == 1: + raise ValueError("Downgrading configuration versions is unsupported: " + + f"from {to_version_string(current_settings['version'])} to {to_version_string(version)}") + + global settings + settings = current_settings + +# default reports definition +reports = { + "pull_request_reports":[ + { + "url":"average_commits_per_PR", + "description":"Average commits per pull request" + }, + { + "url":"average_comments_per_PR", + "description":"Average comments per pull request" + }, + { + "url":"PR_counts_by_merged_status", + "description":"Pull request counts by merged status" + }, + { + "url":"mean_response_times_for_PR", + "description":"Mean response times for pull requests" + }, + { + "url":"mean_days_between_PR_comments", + "description":"Mean days between pull request comments" + }, + { + "url":"PR_time_to_first_response", + "description":"Pull request time until first response" + }, + { + "url":"average_PR_events_for_closed_PRs", + "description":"Average pull request events for closed pull 
requests" + }, + { + "url":"Average_PR_duration", + "description":"Average pull request duration" + } + ], + "contributor_reports":[ + { + "url":"new_contributors_bar", + "description":"New contributors bar graph" + }, + { + "url":"returning_contributors_pie_chart", + "description":"Returning contributors pie chart" + } + ], + "contributor_reports_stacked":[ + { + "url":"new_contributors_stacked_bar", + "description":"New contributors stacked bar chart" + }, + { + "url":"returning_contributors_stacked_bar", + "description":"Returning contributors stacked bar chart" + } + ] +} + +# Initialize logging +def init_logging(): + format = "%(asctime)s: %(message)s" + global logger + logger = logging.getLogger("augur view") + logger.setLevel("DEBUG") diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py new file mode 100644 index 0000000000..2600160396 --- /dev/null +++ b/augur/api/view/routes.py @@ -0,0 +1,321 @@ +import logging +from flask import Flask, render_template, render_template_string, request, abort, jsonify, redirect, url_for, session, flash +from sqlalchemy.orm.exc import NoResultFound +from .utils import * +from flask_login import login_user, logout_user, current_user, login_required + +from augur.application.db.models import User, Repo, ClientApplication +from .server import LoginException +from augur.application.db.session import DatabaseSession +from augur.tasks.init.redis_connection import redis_connection as redis +from augur.application.util import * +from augur.application.config import AugurConfig + +logger = logging.getLogger(__name__) + +with DatabaseSession(logger) as db_session: + config = AugurConfig(logger, db_session) + +# ROUTES ----------------------------------------------------------------------- + +def create_routes(server): + """ ---------------------------------------------------------------- + root: + This route returns a redirect to the application root, appended + by the provided path, if any. + """ + @server.app.route('/root/') + @server.app.route('/root/') + def root(path=""): + return redirect(getSetting("approot") + path) + + """ ---------------------------------------------------------------- + logo: + this route returns a redirect to the application logo associated + with the provided brand, otherwise the inverted Augur logo if no + brand is provided. 
+ """ + @server.app.route('/logo/') + @server.app.route('/logo/') + def logo(brand=None): + if brand is None: + return redirect(url_for('static', filename='img/augur_logo.png')) + elif "augur" in brand: + return logo(None) + elif "chaoss" in brand: + return redirect(url_for('static', filename='img/Chaoss_Logo_white.png')) + return "" + + """ ---------------------------------------------------------------- + default: + table: + This route returns the default view of the application, which + is currently defined as the repository table view + """ + @server.app.route('/') + @server.app.route('/repos/views/table') + def repo_table_view(): + query = request.args.get('q') + try: + page = int(request.args.get('p') or 0) + except: + page = 1 + + sorting = request.args.get('s') + rev = request.args.get('r') + + if rev is not None: + if rev == "False": + rev = False + elif rev == "True": + rev = True + + direction = "DESC" if rev else "ASC" + + pagination_offset = config.get_value("frontend", "pagination_offset") + + if current_user.is_authenticated: + data = current_user.get_repos(page = page, sort = sorting, direction = direction)[0] + page_count = (current_user.get_repo_count()[0] or 0) // pagination_offset + else: + data = get_all_repos(page = page, sort = sorting, direction = direction)[0] + page_count = (get_all_repos_count()[0] or 0) // pagination_offset + + #if not cacheFileExists("repos.json"): + # return renderLoading("repos/views/table", query, "repos.json") + + # return renderRepos("table", query, data, sorting, rev, page, True) + return render_module("repos-table", title="Repos", repos=data, query_key=query, activePage=page, pages=page_count, offset=pagination_offset, PS="repo_table_view", reverse = rev, sorting = sorting) + + """ ---------------------------------------------------------------- + card: + This route returns the repository card view + """ + @server.app.route('/repos/views/card') + def repo_card_view(): + query = request.args.get('q') + if current_user.is_authenticated: + count = current_user.get_repo_count()[0] + data = current_user.get_repos(page_size = count)[0] + else: + count = get_all_repos_count()[0] + data = get_all_repos(page_size=count)[0] + + return renderRepos("card", query, data, filter = True) + + """ ---------------------------------------------------------------- + groups: + This route returns the groups table view, listing all the current + groups in the backend + """ + # @server.app.route('/groups') + # @server.app.route('/groups/') + # def repo_groups_view(group=None): + # query = request.args.get('q') + # page = request.args.get('p') + + # if(group is not None): + # query = group + + # if(query is not None): + # buffer = [] + # data = requestJson("repos") + # for repo in data: + # if query == str(repo["repo_group_id"]) or query in repo["rg_name"]: + # buffer.append(repo) + # return renderRepos("table", query, buffer, page = page, pageSource = "repo_groups_view") + # else: + # groups = requestJson("repo-groups") + # return render_template('index.html', body="groups-table", title="Groups", groups=groups, query_key=query, api_url=getSetting('serving')) + + """ ---------------------------------------------------------------- + status: + This route returns the status view, which displays information + about the current status of collection in the backend + """ + @server.app.route('/status') + def status_view(): + return render_module("status", title="Status") + + """ ---------------------------------------------------------------- + login: + Under 
development + """ + @server.app.route('/account/login', methods=['GET', 'POST']) + def user_login(): + if request.method == 'POST': + try: + username = request.form.get('username') + remember = request.form.get('remember') is not None + password = request.form.get('password') + register = request.form.get('register') + + if username is None: + raise LoginException("A login issue occurred") + + user = User.get_user(username) + if not user and register is None: + raise LoginException("Invalid login credentials") + + # register a user + if register is not None: + if user: + raise LoginException("User already exists") + + email = request.form.get('email') + first_name = request.form.get('first_name') + last_name = request.form.get('last_name') + admin = request.form.get('admin') or False + + result = User.create_user(username, password, email, first_name, last_name, admin) + if not result[0]: + raise LoginException("An error occurred registering your account") + else: + user = User.get_user(username) + flash(result[1]["status"]) + + # Log the user in if the password is valid + if user.validate(password) and login_user(user, remember = remember): + flash(f"Welcome, {username}!") + if "login_next" in session: + return redirect(session.pop("login_next")) + return redirect(url_for('root')) + else: + print("Invalid login") + raise LoginException("Invalid login credentials") + except LoginException as e: + flash(str(e)) + return render_module('login', title="Login") + + """ ---------------------------------------------------------------- + logout: + Under development + """ + @server.app.route('/account/logout') + @login_required + def user_logout(): + logout_user() + flash("You have been logged out") + return redirect(url_for('root')) + + """ ---------------------------------------------------------------- + default: + table: + This route performs external authorization for a user + """ + @server.app.route('/user/authorize') + @login_required + def authorize_user(): + client_id = request.args.get("client_id") + state = request.args.get("state") + response_type = request.args.get("response_type") + + if not client_id or response_type != "code": + return render_message("Invalid Request", "Something went wrong. You may need to return to the previous application and make the request again.") + + # TODO get application from client id + client = ClientApplication.get_by_id(client_id) + + return render_module("authorization", app = client, state = state) + + @server.app.route('/account/delete') + @login_required + def user_delete(): + if current_user.delete()[0]: + flash(f"Account {current_user.login_name} successfully removed") + logout_user() + else: + flash("An error occurred removing the account") + + return redirect(url_for("root")) + + """ ---------------------------------------------------------------- + settings: + Under development + """ + @server.app.route('/account/settings') + @login_required + def user_settings(): + return render_template("settings.j2") + + """ ---------------------------------------------------------------- + report page: + This route returns a report view of the requested repo (by ID). 
+ """ + @server.app.route('/repos/views/repo/') + def repo_repo_view(id): + # For some reason, there is no reports definition (shouldn't be possible) + if reports is None: + return render_message("Report Definitions Missing", "You requested a report for a repo on this instance, but a definition for the report layout was not found.") + + repo = Repo.get_by_id(id) + + return render_module("repo-info", reports=reports.keys(), images=reports, title="Repo", repo=repo, repo_id=id) + + """ ---------------------------------------------------------------- + default: + table: + This route returns the default view of the application, which + is currently defined as the repository table view + """ + @server.app.route('/user/group/') + @login_required + def user_group_view(): + group = request.args.get("group") + + if not group: + return render_message("No Group Specified", "You must specify a group to view this page.") + + params = {} + + try: + params["page"] = int(request.args.get('p') or 0) + except: + params["page"] = 1 + + if sort := request.args.get('s'): + params["sort"] = sort + + rev = request.args.get('r') + if rev is not None: + if rev == "False": + rev = False + params["direction"] = "ASC" + elif rev == "True": + rev = True + params["direction"] = "DESC" + + pagination_offset = config.get_value("frontend", "pagination_offset") + + data = current_user.get_group_repos(group, **params)[0] + page_count = (current_user.get_group_repo_count(group)[0]) or 0 + page_count //= pagination_offset + + if not data: + return render_message("Error Loading Group", "Either the group you requested does not exist, the group has no repos, or an unspecified error occurred.") + + #if not cacheFileExists("repos.json"): + # return renderLoading("repos/views/table", query, "repos.json") + + # return renderRepos("table", None, data, sort, rev, params.get("page"), True) + return render_module("user-group-repos-table", title="Repos", repos=data, query_key=None, activePage=params["page"], pages=page_count, offset=pagination_offset, PS="user_group_view", reverse = rev, sorting = params.get("sort"), group=group) + + """ ---------------------------------------------------------------- + Admin dashboard: + View the admin dashboard. + """ + @server.app.route('/dashboard') + def dashboard_view(): + empty = [ + { "title": "Placeholder", "settings": [ + { "id": "empty", + "display_name": "Empty Entry", + "value": "NULL", + "description": "There's nothing here 👻" + } + ]} + ] + + backend_config = requestJson("config/get", False) + + return render_template('admin-dashboard.j2', sections = empty, config = backend_config) diff --git a/augur/api/view/run.sh b/augur/api/view/run.sh new file mode 100755 index 0000000000..12070743db --- /dev/null +++ b/augur/api/view/run.sh @@ -0,0 +1,11 @@ +export CONFIG_LOCATION="config.yml" +export SERVER_ADDRESS="0.0.0.0" +export SERVER_PORT="8000" + +# Notify the bootstrapper not to generate a Gunicorn config +# Also launch with the development server +export DEVELOPMENT=1 + +export TEMPLATES_AUTO_RELOAD=True + +python3 bootstrap.py diff --git a/augur/api/view/server/Environment.py b/augur/api/view/server/Environment.py new file mode 100644 index 0000000000..409a5975e5 --- /dev/null +++ b/augur/api/view/server/Environment.py @@ -0,0 +1,52 @@ +import os + +class Environment: + """ + This class is used to make dealing with environment variables easier. 
It + allows you to set multiple environment variables at once, and to get items + with subscript notation without needing to deal with the particularities of + non-existent values. + """ + def __init__(self, **kwargs): + for (key, value) in kwargs.items(): + self[key] = value + + def setdefault(self, key, value): + if not self[key]: + self[key] = value + return value + return self[key] + + def setall(self, **kwargs): + result = {} + for (key, value) in kwargs.items(): + if self[key]: + result[key] = self[key] + self[key] = value + + def getany(self, *args): + result = {} + for arg in args: + if self[arg]: + result[arg] = self[arg] + return result + + def as_type(self, type, key): + if self[key]: + return type(self[key]) + return None + + def __getitem__(self, key): + return os.getenv(key) + + def __setitem__(self, key, value): + os.environ[key] = str(value) + + def __len__(self)-> int: + return len(os.environ) + + def __str__(self)-> str: + return str(os.environ) + + def __iter__(self): + return (item for item in os.environ.items()) \ No newline at end of file diff --git a/augur/api/view/server/LoginException.py b/augur/api/view/server/LoginException.py new file mode 100644 index 0000000000..f13a31fc06 --- /dev/null +++ b/augur/api/view/server/LoginException.py @@ -0,0 +1,3 @@ + +class LoginException(Exception): + pass diff --git a/augur/api/view/server/ServerThread.py b/augur/api/view/server/ServerThread.py new file mode 100644 index 0000000000..af3651a8f1 --- /dev/null +++ b/augur/api/view/server/ServerThread.py @@ -0,0 +1,35 @@ +from werkzeug.debug import DebuggedApplication +from werkzeug.serving import make_server +import threading + +class ServerThread(threading.Thread): + """ + Create a runnable Flask server app that automatically launches on a separate + thread. 
+ """ + def __init__(self, app, port = 5000, address = "0.0.0.0", reraise = False): + threading.Thread.__init__(self) + + # Required to enable debugging with make_server + if reraise: + app.config['PROPAGATE_EXCEPTIONS'] = True + app.config['TESTING'] = True + app.config['DEBUG'] = True + app.config['TRAP_HTTP_EXCEPTIONS'] = True + app.config['TEMPLATES_AUTO_RELOAD'] = True + + debug_app = DebuggedApplication(app, True) + + self.server = make_server(address, port, debug_app, threaded = True) + self.ctx = app.app_context() + self.ctx.push() + + # For compatibility with subprocesses + self.terminate = self.shutdown + self.wait = self.join + + def run(self): + self.server.serve_forever() + + def shutdown(self): + self.server.shutdown() \ No newline at end of file diff --git a/augur/api/view/server/User.py b/augur/api/view/server/User.py new file mode 100644 index 0000000000..4889922ce1 --- /dev/null +++ b/augur/api/view/server/User.py @@ -0,0 +1,276 @@ +from flask_login import UserMixin +# I'm using requests here to avoid circular integration with utils +import requests, time, re + +""" ---------------------------------------------------------------- +""" +class User(UserMixin): + # User.api is set in utils.py + # User.logger is set in utils.py + + @property + def is_authenticated(self): + return self._is_authenticated + + @is_authenticated.setter + def is_authenticated(self, val): + self._is_authenticated = val + + @property + def is_active(self): + return self._is_active + + @is_active.setter + def is_active(self, val): + self._is_active = val + + @property + def is_anoymous(self): + return self._is_anoymous + + @is_anoymous.setter + def is_anoymous(self, val): + self._is_anoymous = val + + @property + def exists(self): + return self._exists + + @property + def default_group(self): + if not self.is_authenticated: + return None + elif self._default_group: + return self._default_group + + group_name = self.id + "_default" + groups = self.get_groups() + + if group_name not in groups: + if not self.add_repo_group(group_name): + User.logger.warning("Default user group does not exist, and could not be created") + return None + + self._default_group = group_name + return group_name + + def __init__(self, id): + # flask_login requires that the id be of type string + self.id = str(id) + self._exists = False + self._is_anonymous = False + self._is_authenticated = False + self._is_active = False + self._default_group = None + + # Query the server for the existence of this user + self.query_user() + + def query_user(self): + if self._exists: + # User has already been queried and verified to exist + return True + + endpoint = User.api + "/user/query" + + response = requests.post(endpoint, params = {"username": self.id}) + + if response.status_code == 200 and response.json().get("status") == True: + self._exists = True + return True + + return False + + def get_id(self): + return self.id + + def query_repos(self, group = None): + endpoint = User.api + "/user/repos" + + if not group: + group = self.default_group + + response = requests.post(endpoint, params = {"username": self.id}) + + if response.status_code == 200: + data = response.json() + if data.get("status") == "success": + return data.get("repo_ids") + else: + User.logger.warning(f"Could not get user repos: {data.get('status')}") + else: + User.logger.warning(f"Could not get user repos: {response.status_code}") + + def try_add_url(self, url, group = None): + repo = re.search("https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- 
_]+)(.git)?\/?$", url) + org = re.search("https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/?$", url) + + if repo: + return self.add_repo(url, group) + elif org: + return self.add_org(url, group) + + return False + + def add_repo(self, url, group = None): + endpoint = User.api + "/user/add_repo" + + if not group: + group = self.default_group + + response = requests.post(endpoint, params = {"username": self.id, "repo_url": url}) + + if response.status_code == 200: + data = response.json() + if data.get("status") == "Repo Added": + return True + else: + User.logger.warning(f"Could not add user repo {url}: {data.get('status')}") + else: + User.logger.warning(f"Could not add user repo {url}: {response.status_code}") + + return False + + def add_org(self, url, group = None): + endpoint = User.api + "/user/add_org" + + response = requests.post(endpoint, params = {"username": self.id, "org_url": url}) + + if response.status_code == 200: + data = response.json() + if data.get("status") == "Org repos added": + return True + else: + User.logger.warning(f"Could not add user org {url}: {data.get('status')}") + else: + User.logger.warning(f"Could not add user org {url}: {response.status_code}") + + return False + + def get_groups(self): + endpoint = User.api + "/user/groups" + + response = requests.post(endpoint, params = {"username": self.id}) + + if response.status_code == 200: + return response.json() + else: + data = response.json() + User.logger.warning(f"Could not get user groups: {data.get('status')}") + + def add_repo_group(self, group_name): + endpoint = User.api + "/user/add_group" + + response = requests.post(endpoint, params = {"username": self.id, "group_name": group_name}) + + if response.status_code == 200: + data = response.json() + if data.get("status") == "Group created": + return True + else: + User.logger.warning(f"Could not add user group: {data.get('status')}") + else: + User.logger.warning(f"Could not add user group: {response.status_code}") + + def remove_repo_group(self, group_name): + endpoint = User.api + "/user/remove_group" + + response = requests.post(endpoint, params = {"username": self.id, "group_name": group_name}) + + if response.status_code == 200: + data = response.json() + if data.get("status") == "Group deleted": + return True + else: + User.logger.warning(f"Could not remove user group: {data.get('status')}") + else: + User.logger.warning(f"Could not remove user group: {response.status_code}") + + def select_group(self, group_name, **kwargs): + endpoint = User.api + "/user/group_repos" + + kwargs["username"] = self.id + kwargs["group_name"] = group_name + + response = requests.post(endpoint, params = kwargs) + + if response.status_code == 200: + return response.json() + elif response.status_code == 400: + data = response.json() + User.logger.warning(f"Could not select user group {group_name}: {data.get('status')}") + else: + User.logger.warning(f"Could not select user group {group_name}: {response.status_code}") + + def register(self, request): + endpoint = User.api + "/user/create" + + data = request.form.to_dict() + + # admin creation is CLI only for now + if "create_admin" in data: + data.pop("create_admin") + + response = requests.post(endpoint, params = request.form.to_dict()) + + if response.status_code == 200: + return True + elif response.status_code != 200: + User.logger.debug(f"Could not register user: {response.status_code}") + else: # :/ + User.logger.debug(f"Could not register user: {response.json()['status']}") + + return False + + def validate(self, 
request): + endpoint = User.api + "/user/validate" + + response = requests.post(endpoint, params = request.form.to_dict()) + + if response.status_code == 200 and response.json()["status"] == "Validated": + self._is_authenticated = True + self._is_active = True + return True + elif response.status_code != 200: + User.logger.debug(f"Could not validate user: {response.status_code}") + else: + User.logger.debug(f"Could not validate user: {response.json()['status']}") + + + # Avoid abuse by malicious actors + time.sleep(2) + return False + + def update_password(self, request): + endpoint = User.api + "/user/update" + + data = request.form.to_dict() + data["username"] = self.id + + response = requests.post(endpoint, params = data) + + if response.status_code == 200 and "Updated" in response.json()["status"]: + return True + elif response.status_code != 200: + User.logger.debug(f"Could not update user password: {response.status_code}") + else: + User.logger.debug(f"Could not update user password: {response.json()['status']}") + + return False + + def delete(self): + endpoint = User.api + "/user/remove" + + response = requests.delete(endpoint, params = {"username": self.id}) + + if response.status_code == 200: + return True + elif response.status_code != 200: + User.logger.debug(f"Could not remove user: {response.status_code}") + else: + User.logger.debug(f"Could not remove user: {response.json()['status']}") + + return False + + def __str__(self) -> str: + return f"" diff --git a/augur/api/view/server/__init__.py b/augur/api/view/server/__init__.py new file mode 100644 index 0000000000..287457c4fd --- /dev/null +++ b/augur/api/view/server/__init__.py @@ -0,0 +1,4 @@ +from .Environment import Environment +from .User import User +from .ServerThread import ServerThread +from .LoginException import LoginException diff --git a/augur/api/view/url_converters.py b/augur/api/view/url_converters.py new file mode 100644 index 0000000000..4d43a411f6 --- /dev/null +++ b/augur/api/view/url_converters.py @@ -0,0 +1,27 @@ +from werkzeug.routing import BaseConverter +import json + +class ListConverter(BaseConverter): + def to_python(self, value): + return value.split('+') + + def to_url(self, values): + return '+'.join(BaseConverter.to_url(value) + for value in values) + +class BoolConverter(BaseConverter): + def to_python(self, value): + if value == "False": + return False + elif value == "True": + return True + + def to_url(self, value): + return str(value) + +class JSONConverter(BaseConverter): + def to_python(self, value): + return json.loads(value) + + def to_url(self, value): + return json.dumps(value) diff --git a/augur/api/view/utils.py b/augur/api/view/utils.py new file mode 100644 index 0000000000..e6926dc3b4 --- /dev/null +++ b/augur/api/view/utils.py @@ -0,0 +1,439 @@ +from pathlib import Path +from concurrent.futures import ThreadPoolExecutor +from flask import render_template, flash, url_for +from .init import * +from .server import User +import urllib.request, urllib.error, json, os, math, yaml, urllib3, time, logging, re + +def parse_url(url): + from urllib.parse import urlparse + + # localhost is not a valid host + if "localhost" in url: + url = url.replace("localhost", "127.0.0.1") + + if not url.startswith("http"): + url = f"http://{url}" + + parts = urlparse(url) + directories = parts.path.strip('/').split('/') + queries = parts.query.strip('&').split('&') + + elements = { + 'scheme': parts.scheme, + 'netloc': parts.netloc, + 'path': parts.path, + 'params': parts.params, + 'query': 
parts.query, + 'fragment': parts.fragment + } + + return elements, directories, queries + +def validate_api_url(url): + from urllib.parse import urlunparse + + parts = parse_url(url)[0] + + if not parts["scheme"]: + parts["scheme"] = "http" + + staged_url = urlunparse(parts.values()) + + def is_status_ok(): + try: + with urllib.request.urlopen(staged_url) as request: + response = json.loads(request.read().decode()) + if "status" in response: + return request.url + except Exception as e: + logging.error(f"Error during serving URL verification: {str(e)}") + + return False + + status = is_status_ok() + if not status: + if "/api/unstable" not in parts["path"]: + # The URL does not point directly to the API + # try once more with a new suffix + parts["path"] = str(Path(parts["path"]).joinpath("api/unstable")) + staged_url = urlunparse(parts.values()) + + status = is_status_ok() + if not status: + # The URL does not point to a valid augur instance + return "" + else: + return status + else: + return "" + + return status + + +""" ---------------------------------------------------------------- +loadSettings: + This function attempts to load the application settings from the config file + (defined in init.py). It is assumed that the filename or file path defined + during initialization is sufficient to locate the config file, and that the + current process has read access to that file. + + If loading the config file fails, default settings are loaded via + init_settings() and an attempt is made to write default settings to the + provided config file. +""" +def loadSettings(): + global settings + configFilePath = Path(configFile) + if not configFilePath.is_file(): + init_settings() + with open(configFile, 'w') as file: + logging.info(f"Generating default configuration file: {configFile}") + yaml.dump(settings, file) + logging.info("Default configuration file successfully generated.") + else: + with open(configFilePath) as file: + settings = yaml.load(file, Loader=yaml.FullLoader) + + # Ensure that the cache directory exists and is valid + cachePath = Path(settings["caching"]) + if not cachePath.is_dir(): + if cachePath.is_file(): + raise Exception(f"Cannot initialize caching: cache path [{cachePath}] is a file") + else: + try: + cachePath.mkdir(parents=True) + logging.info("cache directory initialized") + except Exception as err: + raise Exception(f"Cannot initialize caching: could not create cache directory [{cachePath}]") + + # Use the resolved path for cache directory access + settings["caching"] = cachePath + + staged_url = validate_api_url(settings["serving"]) + if staged_url: + settings["serving"] = re.sub("/$", "", staged_url) + settings["valid"] = True + else: + settings["valid"] = False + raise ValueError(f"The provided serving URL is not valid: {settings['serving']}") + +""" ---------------------------------------------------------------- +""" +def getSetting(key): + if key == "serving": + return "http://127.0.0.1:5000/api/unstable" + return settings[key] + +init_logging() + +loadSettings() + +from .init import logger + +User.api = getSetting("serving") +User.logger = logger + +version_check(settings) + +""" ---------------------------------------------------------------- +""" +def loadReports(): + global reports + try: + with open(getSetting("reports")) as file: + reports = yaml.load(file, Loader=yaml.FullLoader) + id = -1 + for report in reports: + for image in reports[report]: + image['id'] = id = id + 1 + return True + except Exception as err: + logging.error(f"An exception occurred 
reading reports endpoints from [{getSetting('reports')}]:") + logging.error(err) + try: + with open(getSetting("reports"), 'w') as file: + logging.info("Attempting to generate default reports.yml") + yaml.dump(reports, file) + logging.info("Default reports file successfully generated.") + except Exception as ioErr: + logging.error("Error creating default report configuration:") + logging.error(ioErr) + return False + +if not loadReports(): + loadReports() + +cache_files_requested = [] + +""" ---------------------------------------------------------------- +""" +def cacheFileExists(filename): + cache_file = Path(filename) + if cache_file.is_file(): + if(getSetting('cache_expiry') > 0): + cache_file_age = time.time() - cache_file.stat().st_mtime + if(cache_file_age > getSetting('cache_expiry')): + try: + cache_file.unlink() + logging.info(f"Cache file {filename} removed due to expiry") + return False + except Exception as e: + logging.error("Error: cache file age exceeds expiry limit, but an exception occurred while attempting to remove") + logging.error(e) + return True + else: + return False + +def stripStatic(url): + return url.replace("static/", "") + +""" ---------------------------------------------------------------- +""" +def toCacheFilename(endpoint): + return endpoint.replace("/", ".").replace("?", "_").replace("=", "_") + '.agcache' + +def toCacheFilepath(endpoint): + return getSetting('caching').joinpath(toCacheFilename(endpoint)) + +def toCacheURL(endpoint): + return getSetting('approot') + str(toCacheFilepath(endpoint)) + +""" ---------------------------------------------------------------- +requestJson: + Attempts to load JSON data from cache for the given endpoint. + If no cache file is found, a request is made to the URL for + the given endpoint and, if successful, the resulting JSON is + cached for future use. Cached files will be stored with all + '/' characters replaced with '.' for filesystem compatibility. + +@PARAM: endpoint: String + A String representation of the requested + json endpoint (relative to the api root). + +@RETURN: data: JSON + An object representing the JSON data read + from either the cache file or the enpoint + URL. Will return None if an error isreturn None + encountered. 
+""" +def requestJson(endpoint, cached = True): + filename = toCacheFilepath(endpoint) + requestURL = getSetting('serving') + "/" + endpoint + logging.info(f'requesting json from: {endpoint}') + try: + if cached and cacheFileExists(filename): + with open(filename) as f: + data = json.load(f) + else: + with urllib.request.urlopen(requestURL) as url: + if url.getcode() != 200: + raise urllib.error.HTTPError(code = url.getcode()) + + data = json.loads(url.read().decode()) + + if cached: + with open(filename, 'w') as f: + json.dump(data, f) + if filename in cache_files_requested: + cache_files_requested.remove(filename) + return data + except Exception as err: + logging.error("An exception occurred while fulfilling a json request") + logging.error(err) + return False, str(err) + +""" ---------------------------------------------------------------- +""" +def requestPNG(endpoint): + filename = toCacheFilepath(endpoint) + requestURL = getSetting('serving') + "/" + endpoint + try: + if cacheFileExists(filename): + return toCacheURL(endpoint) + else: + urllib.request.urlretrieve(requestURL, filename) + if filename in cache_files_requested: + cache_files_requested.remove(filename) + return toCacheURL(endpoint) + except Exception as err: + logging.error("An exception occurred while fulfilling a png request") + logging.error(err) + +""" ---------------------------------------------------------------- +""" +def download(url, cmanager, filename, image_cache, image_id, repo_id = None): + image_cache[image_id] = {} + image_cache[image_id]['filename'] = filename + filename = toCacheFilepath(filename) + if cacheFileExists(filename): + image_cache[image_id]['exists'] = True + return + response = cmanager.request('GET', url) + if "json" in response.headers['Content-Type']: + logging.warn(f"repo {repo_id}: unexpected json response in image request") + logging.warn(f" response: {response.data.decode('utf-8')}") + image_cache[image_id]['exists'] = False + return + if response and response.status == 200: + image_cache[image_id]['exists'] = True + try: + with open(filename, 'wb') as f: + f.write(response.data) + except Exception as err: + logging.error("An exception occurred writing a cache file to disk") + logging.error(err) + +""" ---------------------------------------------------------------- +""" +def requestReports(repo_id): + # If this request has already been fulfilled, no need to process it again + if(repo_id in report_requests.keys()): + return + + # initialize a new request entry to hold the resulting data + report_requests[repo_id] = {} + report_requests[repo_id]['complete'] = False + + """ ---------- + If the report definition could not be loaded, we cannot determine what + files to request from the backend to compose the report. Returning here + causes the completion status of the request to be False, which will + display an error message when sent to the frontend. 
+ """ + if reports is None: + return + + threadPools = [] + reportImages = {} + for report in reports: + # Reports is a dictionary of lists, so we get the size of each list + size = len(reports[report]) + + # Set up various threading components to manage image downloading + connection_mgr = urllib3.PoolManager(maxsize=size) + thread_pool = ThreadPoolExecutor(size) + threadPools.append(thread_pool) + + for image in reports[report]: + # Where should the downloaded image be stored (in cache) + filename = toCacheFilename(f"{image['url']}?repo_id={repo_id}") + # Where are we downloading the image from + image_url = url_for(image['url'], repo_id = repo_id) + # f"{getSetting('serving')}/{image['url']}?repo_id={repo_id}" + + # Add a request for this image to the thread pool using the download function + thread_pool.submit(download, image_url, connection_mgr, filename, reportImages, image['id'], repo_id) + + # Wait for all connections to resolve, then clean up + for thread_pool in threadPools: + thread_pool.shutdown() + + report_requests[repo_id]['images'] = reportImages + + # Remove the request from the queue when completed + report_requests[repo_id]['complete'] = True + +""" ---------------------------------------------------------------- +renderRepos: + This function renders a list of repos using a given view, while passing query + data along. This function also processes pagination automatically for the + range of data provided. If a query is provided and filtering is enabled, the + data will be filtered using the 'repo_name', 'repo_group_id' or 'rg_name'. +@PARAM: view: String + A string representing the template to use for displaying the repos. +@PARAM: query: String + The query argument from the previous page. +@PARAM: data: Dictionary + The repo data to display on the page +@PARAM: sorting: String = None + The key in the data to sort by +@PARAM: rev: Boolean = False + Determines if the sorted data should be displayed in descending order +@PARAM: page: String = None + The current page to use within pagination +@PARAM: filter: Boolean = False + Filter data using query +@PARAM: pageSource: String = "repos/views/table" + The base url to use for the page links +""" +def renderRepos(view, query, data, sorting = None, rev = False, page = None, filter = False, pageSource = "repo_table_view", sortBasis = None): + pagination_offset = getSetting('pagination_offset') + + """ ---------- + If the data does not exist, we cannot construct the table to display on + site. Rendering the table module without data displays an error message + """ + if(data is None): + return render_template('index.j2', body="repos-" + view, title="Repos") + + # If a query exists and filtering is set to true, attempt to filter the data + if((query is not None) and filter): + results = [] + for repo in data: + if (query in repo["repo_name"]) or (query == str(repo["repo_group_id"])) or (query in repo["rg_name"]): + results.append(repo) + data = results + + # Determine the maximum number of pages which can be displayed from the data + pages = math.ceil(len(data) / pagination_offset) + + if page is not None: + page = int(page) + else: + page = 1 + + """ ---------- + Caller requested sorting of the data. The data is a list of dictionaries + with numerous sortable elements, and the "sorting" parameter is assumed + to be the key of the desired element in the dictionary by which to sort. 
+ + We need the "or 0" here to ensure the comparison is valid for rows which + do not have data for the requested column (we're making the assumption + that the data type is comparable to integer 0). + """ + if sorting is not None: + try: + data = sorted(data, key = lambda i: i[sorting] or 0, reverse = rev) + except Exception as e: + flash("An error occurred during sorting") + logger.error(str(e)) + + """ ---------- + Here we extract a subset of the data for display on the site. First we + calculate the start index within the data of our current "page" (x), + then we index to that position plus the pagination offset (or page size) + defined above. The result is a list which contains *at most* a number of + entries equal to the pagination offset + """ + x = pagination_offset * (page - 1) + data = data[x: x + pagination_offset] + + return render_module("repos-" + view, title="Repos", repos=data, query_key=query, activePage=page, pages=pages, offset=pagination_offset, PS=pageSource, reverse = rev, sorting = sorting) + +""" ---------------------------------------------------------------- + Renders a simple page with the given message information, and optional page + title and redirect +""" +def render_message(messageTitle, messageBody = None, title = None, redirect = None, pause = None): + return render_module("notice", messageTitle=messageTitle, messageBody=messageBody, title=title, redirect=redirect, pause=pause) + +""" ---------------------------------------------------------------- +""" +def render_module(module, **args): + # args.setdefault("title", "Augur View") + args.setdefault("api_url", getSetting("serving")) + args.setdefault("body", module) + + if not getSetting("valid"): + args.setdefault("invalid", True) + + return render_template('index.j2', **args) + +""" ---------------------------------------------------------------- + No longer used +""" +# My attempt at a loading page +def renderLoading(dest, query, request): + cache_files_requested.append(request) + return render_template('index.j2', body="loading", title="Loading", d=dest, query_key=query, api_url=getSetting('serving')) diff --git a/augur/application/__init__.py b/augur/application/__init__.py index e69de29bb2..9091d6232a 100644 --- a/augur/application/__init__.py +++ b/augur/application/__init__.py @@ -0,0 +1,13 @@ +def requires_db_session(logger): + def inner_decorator(fun): + def wrapper(*args, **kwargs): + + from augur.application.db.session import DatabaseSession + + # create DB session + with DatabaseSession(logger) as session: + + return fun(session, *args, **kwargs) + + return wrapper + return inner_decorator diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 0d309da44f..37332435e2 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -35,7 +35,6 @@ def cli(): @cli.command("start") @click.option("--disable-collection", is_flag=True, default=False, help="Turns off data collection workers") @click.option("--development", is_flag=True, default=False, help="Enable development mode, implies --disable-collection") -@click.option("--development", is_flag=True, default=False, help="Enable development mode, implies --disable-collection") @click.option('--port') @test_connection @test_db_connection @@ -53,7 +52,6 @@ def start(disable_collection, development, port): raise e if development: - disable_collection = True os.environ["AUGUR_DEV"] = "1" logger.info("Starting in development mode") @@ -92,7 +90,10 @@ def start(disable_collection, development, 
port): start_task.si().apply_async() celery_command = "celery -A augur.tasks.init.celery_app.celery_app beat -l debug" - celery_beat_process = subprocess.Popen(celery_command.split(" ")) + celery_beat_process = subprocess.Popen(celery_command.split(" ")) + + else: + logger.info("Collection disabled") try: server.wait() diff --git a/augur/application/config.py b/augur/application/config.py index ee1cdff367..fb97ba31c2 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -179,6 +179,11 @@ def get_value(self, section_name: str, setting_name: str) -> Optional[Any]: Returns: The value from config if found, and None otherwise """ + + # TODO temporary until added to the DB schema + if section_name == "frontend" and setting_name == "pagination_offset": + return 25 + try: query = self.session.query(Config).filter(Config.section_name == section_name, Config.setting_name == setting_name) config_setting = execute_session_query(query, 'one') diff --git a/augur/application/db/models/__init__.py b/augur/application/db/models/__init__.py index ab9c17953b..3d9277fac2 100644 --- a/augur/application/db/models/__init__.py +++ b/augur/application/db/models/__init__.py @@ -99,5 +99,11 @@ WorkerSettingsFacade, Config, User, - UserRepo + UserRepo, + UserGroup, + UserSessionToken, + ClientApplication, + Subscription, + SubscriptionType, + RefreshToken ) diff --git a/augur/application/db/models/augur_data.py b/augur/application/db/models/augur_data.py index 63f105ef0b..9f7bdba0ec 100644 --- a/augur/application/db/models/augur_data.py +++ b/augur/application/db/models/augur_data.py @@ -21,10 +21,24 @@ from sqlalchemy.dialects.postgresql import JSONB, TIMESTAMP, UUID from sqlalchemy.orm import relationship from sqlalchemy.sql import text +import logging + from augur.application.db.models.base import Base +from augur.application import requires_db_session metadata = Base.metadata +logger = logging.getLogger(__name__) + +def get_session(): + global session + + if "session" not in globals(): + from augur.application.db.session import DatabaseSession + session = DatabaseSession(logger) + + return session + t_analysis_log = Table( "analysis_log", @@ -813,8 +827,15 @@ class Repo(Base): ) repo_group = relationship("RepoGroup") + user_repo = relationship("UserRepo") + + @staticmethod + def get_by_id(repo_id): + local_session = get_session() + return local_session.query(Repo).filter(Repo.repo_id == repo_id).first() + class RepoTestCoverage(Base): __tablename__ = "repo_test_coverage" __table_args__ = {"schema": "augur_data"} diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index a83d34b143..78e2e32865 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -1,11 +1,26 @@ # coding: utf-8 from sqlalchemy import BigInteger, SmallInteger, Column, Index, Integer, String, Table, text, UniqueConstraint, Boolean, ForeignKey -from sqlalchemy.dialects.postgresql import TIMESTAMP +from sqlalchemy.dialects.postgresql import TIMESTAMP, UUID +from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.orm import relationship +from werkzeug.security import generate_password_hash, check_password_hash +import logging +import secrets from augur.application.db.models.base import Base -metadata = Base.metadata +logger = logging.getLogger(__name__) + +def get_session(): + global session + if "session" not in globals(): + from augur.application.db.session import DatabaseSession + session = 
DatabaseSession(logger) + + return session + +metadata = Base.metadata t_all = Table( "all", @@ -169,6 +184,7 @@ class Config(Base): # add admit column to database class User(Base): + user_id = Column(Integer, primary_key=True) login_name = Column(String, nullable=False) login_hashword = Column(String, nullable=False) @@ -190,6 +206,382 @@ class User(Base): {"schema": "augur_operations"} ) + groups = relationship("UserGroup") + tokens = relationship("UserSessionToken") + applications = relationship("ClientApplication") + + _is_authenticated = False + _is_active = True + _is_anoymous = True + + @property + def is_authenticated(self): + return self._is_authenticated + + @is_authenticated.setter + def is_authenticated(self, val): + self._is_authenticated = val + + @property + def is_active(self): + return self._is_active + + @is_active.setter + def is_active(self, val): + self._is_active = val + + @property + def is_anoymous(self): + return self._is_anoymous + + @is_anoymous.setter + def is_anoymous(self, val): + self._is_anoymous = val + + @staticmethod + def exists(username): + return User.get_user(username) is not None + + def get_id(self): + return self.login_name + + def validate(self, password) -> bool: + + if not password: + return False + + result = check_password_hash(self.login_hashword, password) + return result + + @staticmethod + def get_user(username: str): + + if not username: + return None + + local_session = get_session() + + try: + user = local_session.query(User).filter(User.login_name == username).one() + return user + except NoResultFound: + return None + + @staticmethod + def create_user(username: str, password: str, email: str, first_name:str, last_name:str, admin=False): + + if username is None or password is None or email is None or first_name is None or last_name is None: + return False, {"status": "Missing field"} + + local_session = get_session() + + user = local_session.query(User).filter(User.login_name == username).first() + if user is not None: + return False, {"status": "A User already exists with that username"} + + emailCheck = local_session.query(User).filter(User.email == email).first() + if emailCheck is not None: + return False, {"status": "A User already exists with that email"} + + try: + user = User(login_name = username, login_hashword = generate_password_hash(password), email = email, first_name = first_name, last_name = last_name, tool_source="User API", tool_version=None, data_source="API", admin=admin) + local_session.add(user) + local_session.commit() + + result = user.add_group("default") + if not result[0] and result[1]["status"] != "Group already exists": + return False, {"status": "Failed to add default group for the user"} + + return True, {"status": "Account successfully created"} + except AssertionError as exception_message: + return False, {"Error": f"{exception_message}."} + + def delete(self): + + local_session = get_session() + + for group in self.groups: + user_repos_list = group.repos + + for user_repo_entry in user_repos_list: + local_session.delete(user_repo_entry) + + local_session.delete(group) + + local_session.delete(self) + local_session.commit() + + return True, {"status": "User deleted"} + + def update_password(self, old_password, new_password): + + local_session = get_session() + + if not old_password or not new_password: + print("Need old and new password to update the password") + return False, {"status": "Need old and new password to update the password"} + + if not check_password_hash(self.login_hashword, 
old_password): + print("Password did not match the users password, unable to update password") + return False, {"status": "Password did not match users password"} + + self.login_hashword = generate_password_hash(new_password) + local_session.commit() + # print("Password Updated") + + return True, {"status": "Password updated"} + + def update_email(self, new_email): + + local_session = get_session() + + if not new_email: + print("Need new email to update the email") + return False, {"status": "Missing argument"} + + existing_user = local_session.query(User).filter(User.email == new_email).first() + if existing_user is not None: + print("Func: update_user. Error: Already an account with this email") + return False, {"status": "There is already an account with this email"} + + self.email = new_email + local_session.commit() + # print("Email Updated") + return True, {"status": "Email updated"} + + def update_username(self, new_username): + + local_session = get_session() + + if not new_username: + print("Need new username to update the username") + return False, {"status": "Missing argument"} + + existing_user = local_session.query(User).filter(User.login_name == new_username).first() + if existing_user is not None: + print("Func: update_user. Error: Already an account with this username") + return False, {"status": "Username already taken"} + + self.login_name = new_username + local_session.commit() + # print("Username Updated") + return True, {"status": "Username updated"} + + + def add_group(self, group_name): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + repo_load_controller = RepoLoadController(gh_session=local_session) + + result = repo_load_controller.add_user_group(self.user_id, group_name) + + return result + + def remove_group(self, group_name): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + repo_load_controller = RepoLoadController(gh_session=local_session) + + result = repo_load_controller.remove_user_group(self.user_id, group_name) + + return result + + def add_repo(self, group_name, repo_url): + + from augur.tasks.github.util.github_task_session import GithubTaskSession + from augur.util.repo_load_controller import RepoLoadController + + with GithubTaskSession(logger) as session: + + repo_load_controller = RepoLoadController(gh_session=session) + + result = repo_load_controller.add_frontend_repo(repo_url, self.user_id, group_name) + + return result + + def remove_repo(self, group_name, repo_id): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + repo_load_controller = RepoLoadController(gh_session=local_session) + + result = repo_load_controller.remove_frontend_repo(repo_id, self.user_id, group_name) + print(result) + + return result + + def add_org(self, group_name, org_url): + + from augur.tasks.github.util.github_task_session import GithubTaskSession + from augur.util.repo_load_controller import RepoLoadController + + with GithubTaskSession(logger) as session: + + repo_load_controller = RepoLoadController(gh_session=session) + + result = repo_load_controller.add_frontend_org(org_url, self.user_id, group_name) + + return result + + def get_groups(self): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + controller = RepoLoadController(local_session) + + user_groups = controller.get_user_groups(self.user_id) + + return user_groups, {"status": "success"} + + 
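    # Illustrative usage sketch (an assumption, not part of this patch): how the
    # User helpers defined above might be exercised from a maintenance script,
    # assuming a configured Augur database and the method signatures shown in this
    # class. The username, password, email and repo URL are hypothetical placeholders.
    #
    #   from augur.application.db.models import User
    #
    #   created, status = User.create_user("alice", "s3cret", "alice@example.com", "Alice", "Smith")
    #   if created:
    #       user = User.get_user("alice")
    #       user.add_repo("default", "https://github.com/chaoss/augur")  # group name, then repo URL
    #       groups, _ = user.get_groups()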
def get_group_names(self): + + user_groups = self.get_groups()[0] + + group_names = [group.name for group in user_groups] + + return group_names, {"status": "success"} + + + def get_repos(self, page=0, page_size=25, sort="repo_id", direction="ASC"): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + result = RepoLoadController(local_session).paginate_repos("user", page, page_size, sort, direction, user=self) + + return result + + def get_repo_count(self): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + controller = RepoLoadController(local_session) + + result = controller.get_repo_count(source="user", user=self) + + return result + + + def get_group_repos(self, group_name, page=0, page_size=25, sort="repo_id", direction="ASC"): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + print("Get group repos") + + result = RepoLoadController(local_session).paginate_repos("group", page, page_size, sort, direction, user=self, group_name=group_name) + + return result + + + def get_group_repo_count(self, group_name): + + from augur.util.repo_load_controller import RepoLoadController + + local_session = get_session() + + controller = RepoLoadController(local_session) + + result = controller.get_repo_count(source="group", group_name=group_name, user=self) + + return result + + def invalidate_session(self, token): + + from augur.application.db.session import DatabaseSession + + with DatabaseSession(logger) as session: + + row_count = session.query(UserSessionToken).filter(UserSessionToken.user_id == self.user_id, UserSessionToken.token == token).delete() + session.commit() + + return row_count == 1 + + def delete_app(self, app_id): + + from augur.application.db.session import DatabaseSession + + with DatabaseSession(logger) as session: + + row_count = session.query(ClientApplication).filter(ClientApplication.user_id == self.user_id, ClientApplication.id == app_id).delete() + session.commit() + + return row_count == 1 + + def add_app(self, name, redirect_url): + + from augur.application.db.session import DatabaseSession + + with DatabaseSession(logger) as session: + + try: + app = ClientApplication(id=secrets.token_hex(16), api_key=secrets.token_hex(), name=name, redirect_url=redirect_url, user_id=self.user_id) + session.add(app) + session.commit() + except Exception as e: + print(e) + return False + + return True + + def toggle_group_favorite(self, group_name): + + local_session = get_session() + + group = local_session.query(UserGroup).filter(UserGroup.name == group_name, UserGroup.user_id == self.user_id).first() + if not group: + return False, {"status": "Group does not exist"} + + group.favorited = not group.favorited + + local_session.commit() + + return True, {"status": "Success"} + + def get_favorite_groups(self): + + local_session = get_session() + + try: + groups = local_session.query(UserGroup).filter(UserGroup.user_id == self.user_id, UserGroup.favorited == True).all() + except Exception as e: + print(f"Error while trying to get favorite groups: {e}") + return None, {"status": "Error when trying to get favorite groups"} + + return groups, {"status": "Success"} + + + +class UserGroup(Base): + group_id = Column(BigInteger, primary_key=True) + user_id = Column(Integer, + ForeignKey("augur_operations.users.user_id", name="user_group_user_id_fkey") + ) + name = Column(String, nullable=False) + favorited = Column(Boolean, nullable=False, 
server_default=text("FALSE")) + __tablename__ = 'user_groups' + __table_args__ = ( + UniqueConstraint('user_id', 'name', name='user_group_unique'), + {"schema": "augur_operations"} + ) + + user = relationship("User") + repos = relationship("UserRepo") + class UserRepo(Base): @@ -200,10 +592,135 @@ class UserRepo(Base): } ) - user_id = Column( - ForeignKey("augur_operations.users.user_id"), primary_key=True, nullable=False + group_id = Column( + ForeignKey("augur_operations.user_groups.group_id", name="user_repo_group_id_fkey"), primary_key=True, nullable=False ) repo_id = Column( - ForeignKey("augur_data.repo.repo_id"), primary_key=True, nullable=False + ForeignKey("augur_data.repo.repo_id", name="user_repo_user_id_fkey"), primary_key=True, nullable=False + ) + + repo = relationship("Repo") + group = relationship("UserGroup") + +class UserSessionToken(Base): + __tablename__ = "user_session_tokens" + __table_args__ = ( + { + "schema": "augur_operations" + } + ) + + token = Column(String, primary_key=True, nullable=False) + user_id = Column(ForeignKey("augur_operations.users.user_id", name="user_session_token_user_id_fkey")) + expiration = Column(BigInteger) + application_id = Column(ForeignKey("augur_operations.client_applications.id", name="user_session_token_application_id_fkey"), nullable=False) + created_at = Column(BigInteger) + + user = relationship("User") + application = relationship("ClientApplication") + refresh_tokens = relationship("RefreshToken") + + @staticmethod + def create(user_id, application_id, seconds_to_expire=86400): + import time + + user_session_token = secrets.token_hex() + expiration = int(time.time()) + seconds_to_expire + + local_session = get_session() + user_session = UserSessionToken(token=user_session_token, user_id=user_id, application_id = application_id, expiration=expiration) + + local_session.add(user_session) + local_session.commit() + + return user_session + + def delete_refresh_tokens(self, session): + + refresh_tokens = self.refresh_tokens + for token in refresh_tokens: + session.delete(token) + session.commit() + + session.delete(self) + session.commit() + +class ClientApplication(Base): + __tablename__ = "client_applications" + __table_args__ = ( + { + "schema": "augur_operations" + } + ) + + id = Column(String, primary_key=True, nullable=False) + user_id = Column(ForeignKey("augur_operations.users.user_id", name="client_application_user_id_fkey"), nullable=False) + name = Column(String, nullable=False) + redirect_url = Column(String, nullable=False) + api_key = Column(String, nullable=False) + + user = relationship("User") + sessions = relationship("UserSessionToken") + subscriptions = relationship("Subscription") + + @staticmethod + def get_by_id(client_id): + + local_session = get_session() + + return local_session.query(ClientApplication).filter(ClientApplication.id == client_id).first() + + +class Subscription(Base): + __tablename__ = "subscriptions" + __table_args__ = ( + { + "schema": "augur_operations" + } ) + application_id = Column(ForeignKey("augur_operations.client_applications.id", name="subscriptions_application_id_fkey"), primary_key=True) + type_id = Column(ForeignKey("augur_operations.subscription_types.id", name="subscriptions_type_id_fkey"), primary_key=True) + + application = relationship("ClientApplication") + type = relationship("SubscriptionType") + +class SubscriptionType(Base): + __tablename__ = "subscription_types" + __table_args__ = ( + UniqueConstraint('name', name='subscription_type_title_unique'), + {"schema": 
"augur_operations"} + ) + + + id = Column(BigInteger, primary_key=True) + name = Column(String, nullable=False) + + subscriptions = relationship("Subscription") + + +class RefreshToken(Base): + __tablename__ = "refresh_tokens" + __table_args__ = ( + UniqueConstraint('user_session_token', name='refresh_token_user_session_token_id_unique'), + {"schema": "augur_operations"} + ) + + id = Column(String, primary_key=True) + user_session_token = Column(ForeignKey("augur_operations.user_session_tokens.token", name="refresh_token_session_token_id_fkey"), nullable=False) + + user_session = relationship("UserSessionToken") + + @staticmethod + def create(user_session_token_id): + + refresh_token_id = secrets.token_hex() + + local_session = get_session() + refresh_token = RefreshToken(id=refresh_token_id, user_session_token=user_session_token_id) + + local_session.add(refresh_token) + local_session.commit() + + return refresh_token + diff --git a/augur/application/db/session.py b/augur/application/db/session.py index 40ee08acfa..5330113ad5 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -5,7 +5,6 @@ import random import logging import json -import httpx import sqlalchemy as s from typing import Optional, List, Union @@ -202,7 +201,7 @@ def insert_data(self, data: Union[List[dict], dict], table, natural_keys: List[s if deadlock_detected is True: self.logger.error("Made it through even though Deadlock was detected") - return None + return "success" # othewise it gets the requested return columns and returns them as a list of dicts diff --git a/augur/application/db/util.py b/augur/application/db/util.py index d5c9ac0c22..c2b2500721 100644 --- a/augur/application/db/util.py +++ b/augur/application/db/util.py @@ -14,8 +14,8 @@ def catch_operational_error(func): time.sleep(240) try: return func() - except OperationalError: - pass + except OperationalError as e: + print(f"ERROR: {e}") attempts += 1 diff --git a/augur/application/schema/alembic/versions/3_oauth_and_user_groups.py b/augur/application/schema/alembic/versions/3_oauth_and_user_groups.py new file mode 100644 index 0000000000..8d75b7a709 --- /dev/null +++ b/augur/application/schema/alembic/versions/3_oauth_and_user_groups.py @@ -0,0 +1,231 @@ +"""Implemented oauth and user groups + +Revision ID: 3 +Revises: 2 +Create Date: 2022-12-19 11:00:37.509132 + +""" +import logging + +from alembic import op +import sqlalchemy as sa +from augur.application.db.session import DatabaseSession +from augur.application.db.models.augur_operations import UserGroup, UserRepo + +CLI_USER_ID = 1 + + +# revision identifiers, used by Alembic. 
+revision = '3' +down_revision = '2' +branch_labels = None +depends_on = None + +logger = logging.getLogger(__name__) + +def upgrade(): + + with DatabaseSession(logger) as session: + + create_user_groups_table = """ + CREATE TABLE "augur_operations"."user_groups" ( + "group_id" BIGSERIAL NOT NULL, + "user_id" int4 NOT NULL, + "name" varchar COLLATE "pg_catalog"."default" NOT NULL, + PRIMARY KEY ("group_id"), + FOREIGN KEY ("user_id") REFERENCES "augur_operations"."users" ("user_id") ON DELETE NO ACTION ON UPDATE NO ACTION, + UNIQUE ("user_id", "name") + ); + + + ALTER TABLE "augur_operations"."user_groups" + OWNER TO "augur"; + + INSERT INTO "augur_operations"."user_groups" ("group_id", "user_id", "name") VALUES (1, {}, 'default') ON CONFLICT ("user_id", "name") DO NOTHING; + ALTER SEQUENCE user_groups_group_id_seq RESTART WITH 2; + """.format(CLI_USER_ID) + + session.execute_sql(sa.sql.text(create_user_groups_table)) + + + user_repos = [] + + # create user group for all the users that have repos + user_id_query = sa.sql.text("""SELECT DISTINCT(user_id) FROM user_repos;""") + user_groups = session.fetchall_data_from_sql_text(user_id_query) + if user_groups: + + result = [] + for group in user_groups: + + user_id = group["user_id"] + + if user_id == CLI_USER_ID: + continue + + user_group_insert = sa.sql.text(f"""INSERT INTO "augur_operations"."user_groups" ("user_id", "name") VALUES ({user_id}, 'default') RETURNING group_id, user_id;""") + result.append(session.fetchall_data_from_sql_text(user_group_insert)[0]) + + # cli user mapping by default + user_group_id_mapping = {CLI_USER_ID: "1"} + for row in result: + user_group_id_mapping[row["user_id"]] = row["group_id"] + + + user_repo_query = sa.sql.text("""SELECT * FROM user_repos;""") + user_repo_data = session.fetchall_data_from_sql_text(user_repo_query) + for row in user_repo_data: + row.update({"group_id": user_group_id_mapping[row["user_id"]]}) + del row["user_id"] + user_repos.extend(user_repo_data) + + # remove data from table before modifiying it + remove_data_from_user_repos_query = sa.sql.text("""DELETE FROM user_repos;""") + session.execute_sql(remove_data_from_user_repos_query) + + + table_changes = """ + ALTER TABLE user_repos + ADD COLUMN group_id BIGINT, + ADD CONSTRAINT user_repos_group_id_fkey FOREIGN KEY (group_id) REFERENCES user_groups(group_id), + DROP COLUMN user_id, + ADD PRIMARY KEY (group_id, repo_id); + """ + + session.execute_sql(sa.sql.text(table_changes)) + + for data in user_repos: + + group_id = data["group_id"] + repo_id = data["repo_id"] + + user_repo_insert = sa.sql.text(f"""INSERT INTO "augur_operations"."user_repos" ("group_id", "repo_id") VALUES ({group_id}, {repo_id});""") + result = session.execute_sql(user_repo_insert) + + op.create_table('client_applications', + sa.Column('id', sa.String(), nullable=False), + sa.Column('api_key', sa.String(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('redirect_url', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['augur_operations.users.user_id'], name='client_application_user_id_fkey'), + sa.PrimaryKeyConstraint('id'), + schema='augur_operations' + ) + + op.create_table('user_session_tokens', + sa.Column('token', sa.String(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('created_at', sa.BigInteger(), nullable=True), + sa.Column('expiration', sa.BigInteger(), nullable=True), + sa.Column('application_id', sa.String(), 
nullable=True), + + sa.ForeignKeyConstraint(['application_id'], ['augur_operations.client_applications.id'], name='user_session_token_application_id_fkey'), + sa.ForeignKeyConstraint(['user_id'], ['augur_operations.users.user_id'], name='user_session_token_user_fk'), + sa.PrimaryKeyConstraint('token'), + schema='augur_operations' + ) + + op.create_table('subscription_types', + sa.Column('id', sa.BigInteger(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name', name='subscription_type_title_unique'), + schema='augur_operations' + ) + op.create_table('subscriptions', + sa.Column('application_id', sa.String(), nullable=False), + sa.Column('type_id', sa.BigInteger(), nullable=False), + sa.ForeignKeyConstraint(['application_id'], ['augur_operations.client_applications.id'], name='subscriptions_application_id_fkey'), + sa.ForeignKeyConstraint(['type_id'], ['augur_operations.subscription_types.id'], name='subscriptions_type_id_fkey'), + sa.PrimaryKeyConstraint('application_id', 'type_id'), + schema='augur_operations' + ) + + op.add_column('user_groups', sa.Column('favorited', sa.Boolean(), server_default=sa.text('FALSE'), nullable=False), schema='augur_operations') + + + op.create_table('refresh_tokens', + sa.Column('id', sa.String(), nullable=False), + sa.Column('user_session_token', sa.String(), nullable=False), + sa.ForeignKeyConstraint(['user_session_token'], ['augur_operations.user_session_tokens.token'], name='refresh_token_session_token_id_fkey'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('user_session_token', name='refresh_token_user_session_token_id_unique'), + schema='augur_operations' + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + + op.drop_table('refresh_tokens', schema='augur_operations') + + op.drop_column('user_groups', 'favorited', schema='augur_operations') + + op.drop_table('subscriptions', schema='augur_operations') + op.drop_table('subscription_types', schema='augur_operations') + + user_group_ids = {} + group_repo_ids = {} + with DatabaseSession(logger) as session: + user_id_query = sa.sql.text("""SELECT * FROM user_groups;""") + user_groups = session.fetchall_data_from_sql_text(user_id_query) + for row in user_groups: + try: + user_group_ids[row["user_id"]].append(row["group_id"]) + except KeyError: + user_group_ids[row["user_id"]] = [row["group_id"]] + + + group_id_query = sa.sql.text("""SELECT * FROM user_repos;""") + group_repo_id_result = session.fetchall_data_from_sql_text(group_id_query) + for row in group_repo_id_result: + try: + group_repo_ids[row["group_id"]].append(row["repo_id"]) + except KeyError: + group_repo_ids[row["group_id"]] = [row["repo_id"]] + + remove_data_from_user_repos_query = sa.sql.text("""DELETE FROM user_repos;""") + session.execute_sql(remove_data_from_user_repos_query) + + + table_changes = """ + ALTER TABLE user_repos + ADD COLUMN user_id INT, + ADD CONSTRAINT user_repos_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(user_id), + DROP COLUMN group_id, + ADD PRIMARY KEY (user_id, repo_id); + DROP TABLE user_groups; + """ + + session.execute_sql(sa.sql.text(table_changes)) + + for user_id, group_ids in user_group_ids.items(): + + repos = [] + for group_id in group_ids: + try: + repos.extend(group_repo_ids[group_id]) + except KeyError: + continue + + if repos: + + query_text_array = ["""INSERT INTO "augur_operations"."user_repos" ("repo_id", "user_id") VALUES """] + for i, repo_id in enumerate(repos): + query_text_array.append(f"({repo_id}, {user_id})") + + delimiter = ";" if i == len(repos) -1 else "," + + query_text_array.append(delimiter) + + + query_text = "".join(query_text_array) + + session.execute_sql(sa.sql.text(query_text)) + + op.drop_table('user_session_tokens', schema='augur_operations') + op.drop_table('client_applications', schema='augur_operations') + + # ### end Alembic commands ### diff --git a/augur/application/util.py b/augur/application/util.py new file mode 100644 index 0000000000..2d606804a6 --- /dev/null +++ b/augur/application/util.py @@ -0,0 +1,29 @@ +import logging + +from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.util.repo_load_controller import RepoLoadController + +logger = logging.getLogger(__name__) + +def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): + + with GithubTaskSession(logger) as session: + + controller = RepoLoadController(session) + + result = controller.paginate_repos("all", page, page_size, sort, direction) + + return result + +def get_all_repos_count(): + + with GithubTaskSession(logger) as session: + + controller = RepoLoadController(session) + + result = controller.get_repo_count(source="all") + + return result + + + diff --git a/augur/static/css/dashboard.css b/augur/static/css/dashboard.css new file mode 100644 index 0000000000..cf712777a8 --- /dev/null +++ b/augur/static/css/dashboard.css @@ -0,0 +1,65 @@ +:root { + --color-bg: #1A233A; + --color-bg-light: #272E48; + --color-fg: white; + --color-fg-contrast: black; + --color-accent: #6f42c1; + --color-accent-dark: #6134b3; + --color-notice: #00ddff; + --color-notice-contrast: #006979; +} + +body { + background-color: var(--color-bg); + color: var(--color-fg); + overflow: hidden; +} + +.content-column { + 
overflow-y: scroll; +} + +.dashboard-content { + background-color: var(--color-bg-light); + margin-top: 10px; + margin-bottom: 10px; +} + +.nav-pills .nav-link.active, .nav-pills .show > .nav-link { + background-color: var(--color-accent); +} + +.dashboard-sidebar { + width: 280px; + background-color: var(--color-bg-light) !important; +} + +.dashboard-form-control { + border: 1px solid #596280; + -webkit-border-radius: 2px; + -moz-border-radius: 2px; + border-radius: 2px; + font-size: .825rem; + background: #1A233A; + color: #bcd0f7; +} + +table { + background-color: var(--color-fg); + color: var(--color-fg-contrast); +} + +.toast-container { + top: 80px !important; + right: 16px !important; +} + +.toast { + background-color: var(--color-notice); + color: var(--color-fg); +} + +#toast-placeholder { + display: none; + z-index: 100; +} \ No newline at end of file diff --git a/augur/static/css/first_time.css b/augur/static/css/first_time.css new file mode 100644 index 0000000000..12f8ae9f54 --- /dev/null +++ b/augur/static/css/first_time.css @@ -0,0 +1,148 @@ +body{ + margin-top:20px; + color: #bcd0f7; + background: #1A233A; +} +h1 { + font-size: 2rem; +} +.sidebar .sidebar-top { + margin: 0 0 1rem 0; + padding-bottom: 1rem; + text-align: center; +} +.sidebar .sidebar-top .brand-logo { + margin: 0 0 1rem 0; +} +.sidebar .sidebar-top .brand-logo img { + height: 90px; + -webkit-border-radius: 100px; + -moz-border-radius: 100px; + border-radius: 100px; +} +.sidebar .about { + margin: 1rem 0 0 0; + font-size: 0.8rem; + text-align: center; +} +.card { + background: #272E48; + -webkit-border-radius: 5px; + -moz-border-radius: 5px; + border-radius: 5px; + border: 0; + margin-bottom: 1rem; +} +.form-control { + border: 1px solid #596280; + -webkit-border-radius: 2px; + -moz-border-radius: 2px; + border-radius: 2px; + font-size: .825rem; + background: #1A233A; + color: #bcd0f7; +} +.modal-content { + color: black; +} +.editor-container { + height: 300px !important; +} + +.spinner { + -webkit-animation: rotator 1.4s linear infinite; + animation: rotator 1.4s linear infinite; +} + +.spinner-container { + display: flex; + align-items: center; + justify-content: center; +} + +@-webkit-keyframes rotator { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(270deg); + } +} + +@keyframes rotator { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(270deg); + } +} +.path { + stroke-dasharray: 187; + stroke-dashoffset: 0; + transform-origin: center; + -webkit-animation: dash 1.4s ease-in-out infinite, colors 5.6s ease-in-out infinite; + animation: dash 1.4s ease-in-out infinite, colors 5.6s ease-in-out infinite; +} + +@-webkit-keyframes colors { + 0% { + stroke: #4285F4; + } + 25% { + stroke: #DE3E35; + } + 50% { + stroke: #F7C223; + } + 75% { + stroke: #1B9A59; + } + 100% { + stroke: #4285F4; + } +} + +@keyframes colors { + 0% { + stroke: #4285F4; + } + 25% { + stroke: #DE3E35; + } + 50% { + stroke: #F7C223; + } + 75% { + stroke: #1B9A59; + } + 100% { + stroke: #4285F4; + } +} +@-webkit-keyframes dash { + 0% { + stroke-dashoffset: 187; + } + 50% { + stroke-dashoffset: 46.75; + transform: rotate(135deg); + } + 100% { + stroke-dashoffset: 187; + transform: rotate(450deg); + } +} +@keyframes dash { + 0% { + stroke-dashoffset: 187; + } + 50% { + stroke-dashoffset: 46.75; + transform: rotate(135deg); + } + 100% { + stroke-dashoffset: 187; + transform: rotate(450deg); + } +} diff --git a/augur/static/css/stylesheet.css b/augur/static/css/stylesheet.css new file mode 100644 index 
0000000000..6fd22a49eb --- /dev/null +++ b/augur/static/css/stylesheet.css @@ -0,0 +1,453 @@ +:root { + --color-bg: #252525; + --color-bg-light: #4a4651; + --color-fg: white; + --color-fg-contrast: black; + --color-accent: #6f42c1; + --color-accent-dark: #6134b3; + --color-notice: #00ddff; + --color-notice-contrast: #006979; + --color-link: #5de4ff +} + +html, +body { + /* IE 10-11 didn't like using min-height */ + height: 100%; +} + +body { + display: flex; + flex-direction: column; + background-color: var(--color-bg); +} + +.hidden { + display: none; +} + +.content { + flex: 1 0 auto; + /* Prevent Chrome, Opera, and Safari from letting these items shrink to smaller than their content's default minimum size. */ + padding: 20px; + padding-top: 100px; + color: var(--color-fg); + text-align: center; +} + +.footer { + flex-shrink: 0; + /* Prevent Chrome, Opera, and Safari from letting these items shrink to smaller than their content's default minimum size. */ + padding: 5px; + background-color: var(--color-bg-light); + width: 100%; +} + +.nav-image { + width: 100px; +} + +.nav-separator { + background-color: var(--color-accent-dark); + max-width: 5px; + min-width: 5px; + height: 35px; +} + +.content-column { + overflow-y: scroll; +} + +.dashboard-content { + background-color: var(--color-bg-light); + margin-top: 10px; + margin-bottom: 10px; +} + +.nav-pills .nav-link.active, .nav-pills .show > .nav-link { + background-color: var(--color-accent); +} + +.dashboard-sidebar { + width: 280px; + background-color: var(--color-bg-light) !important; +} + +.dashboard-form-control { + border: 1px solid #596280; + -webkit-border-radius: 2px; + -moz-border-radius: 2px; + border-radius: 2px; + font-size: .825rem; + background: #1A233A; + color: #bcd0f7; +} + +.notification-icon { + height: 30px; + -webkit-filter: invert(100%); + /* safari 6.0 - 9.0 */ + filter: invert(100%); +} + +.submitContainer { + background-color: var(--color-accent); +} + +.toast-container { + top: 80px !important; + right: 16px !important; +} + +.toast { + background-color: var(--color-notice); + color: var(--color-fg); +} + +#toast-placeholder { + display: none; +} + +.dashboard-sidebar { + height: 100%; +} + +.display-table { + background-color: white !important; + overflow: auto; +} + +.paginationActive { + background-color: var(--color-accent-dark); + border-color: var(--color-accent-dark); + color: var(--color-fg); +} + +.sorting-link { + color: var(--color-fg-contrast); + text-decoration: none; +} + +/* User settings page */ + +.user-settings-form { + width: 30%; + min-width: 300px; + margin: auto; +} + +.user-settings-form label { + margin-top: 10px; +} + +.user-card { + background-color: var(--color-bg-light); + margin-bottom: 40px; +} + +.user-card a { + color: var(--color-link); +} + +/* Login Page */ + +#registration-div { + -webkit-transition: opacity 0.5s ease; + -moz-transition: opacity 0.5s ease; + -ms-transition: opacity 0.5s ease; + -o-transition: opacity 0.5s ease; + transition: opacity 0.5s ease; +} + +form i { + pointer-events: none; +} + +.password-toggle { + background-color: var(--color-fg); + border: 1px solid #ced4da; + border-top-left-radius: 0; + border-bottom-left-radius: 0; +} + +/* Image Zoom */ + +img[id^="report_image_"] { + border-radius: 5px; + cursor: pointer; + transition: 0.3s; +} + +img[id^="report_image_"]:hover { + filter: brightness(50%); +} + +/* The Modal (background) */ +.modal { + display: none; + /* Hidden by default */ + position: fixed; + /* Stay in place */ + z-index: 1; + /* Sit on 
top */ + padding-top: 100px; + /* Location of the box */ + left: 0; + top: 0; + width: 100%; + /* Full width */ + height: 100%; + /* Full height */ + overflow: auto; + /* Enable scroll if needed */ + background-color: rgb(0, 0, 0); + /* Fallback color */ + background-color: rgba(0, 0, 0, 0.9); + /* Black w/ opacity */ +} + +/* Modal Content (image) */ +.modal-content { + margin: auto; + display: block; + width: 70%; +} + +/* Caption of Modal Image */ +#caption { + margin: auto; + display: block; + width: 80%; + max-width: 700px; + text-align: center; + color: #ccc; + padding: 10px 0; + height: 80px; +} + +#close-caption { + margin: auto; + display: block; + width: 80%; + max-width: 700px; + text-align: center; + color: #ccc; + padding: 10px 0; + height: 50px; +} + +/* Add Animation */ +.modal-content, +#caption { + -webkit-animation-name: zoom; + -webkit-animation-duration: 0.6s; + animation-name: zoom; + animation-duration: 0.6s; +} + +@-webkit-keyframes zoom { + from { + -webkit-transform: scale(0) + } + + to { + -webkit-transform: scale(1) + } +} + +@keyframes zoom { + from { + transform: scale(0) + } + + to { + transform: scale(1) + } +} + +/* The Close Button */ +.close { + position: absolute; + top: 45px; + right: 35px; + color: #f1f1f1; + font-size: 40px; + font-weight: bold; + transition: 0.3s; +} + +.close:hover, +.close:focus { + color: #bbb; + text-decoration: none; + cursor: pointer; +} + +/* 100% Image Width on Smaller Screens */ +@media only screen and (max-width: 700px) { + .modal-content { + width: 100%; + } +} + +.img_placeholder { + width: 256px; + height: 256px; +} + +.card-footer-wrap { + width: 256px; + +} + +/* Loading Animation */ + +.dot-flashing { + position: relative; + width: 10px; + height: 10px; + border-radius: 5px; + background-color: #9880ff; + color: #9880ff; + animation: dotFlashing 1s infinite linear alternate; + animation-delay: .5s; +} + +.dot-flashing::before, +.dot-flashing::after { + content: ''; + display: inline-block; + position: absolute; + top: 0; +} + +.dot-flashing::before { + left: -15px; + width: 10px; + height: 10px; + border-radius: 5px; + background-color: #9880ff; + color: #9880ff; + animation: dotFlashing 1s infinite alternate; + animation-delay: 0s; +} + +.dot-flashing::after { + left: 15px; + width: 10px; + height: 10px; + border-radius: 5px; + background-color: #9880ff; + color: #9880ff; + animation: dotFlashing 1s infinite alternate; + animation-delay: 1s; +} + +@keyframes dotFlashing { + 0% { + background-color: #9880ff; + } + + 50%, + 100% { + background-color: #ebe6ff; + } +} + +.spinner { + -webkit-animation: rotator 1.4s linear infinite; + animation: rotator 1.4s linear infinite; +} + +.spinner-container { + display: flex; + align-items: center; + justify-content: center; +} + +@-webkit-keyframes rotator { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(270deg); + } +} + +@keyframes rotator { + 0% { + transform: rotate(0deg); + } + 100% { + transform: rotate(270deg); + } +} +.path { + stroke-dasharray: 187; + stroke-dashoffset: 0; + transform-origin: center; + -webkit-animation: dash 1.4s ease-in-out infinite, colors 5.6s ease-in-out infinite; + animation: dash 1.4s ease-in-out infinite, colors 5.6s ease-in-out infinite; +} + +@-webkit-keyframes colors { + 0% { + stroke: #4285F4; + } + 25% { + stroke: #DE3E35; + } + 50% { + stroke: #F7C223; + } + 75% { + stroke: #1B9A59; + } + 100% { + stroke: #4285F4; + } +} + +@keyframes colors { + 0% { + stroke: #4285F4; + } + 25% { + stroke: #DE3E35; + } + 50% { + stroke: 
#F7C223; + } + 75% { + stroke: #1B9A59; + } + 100% { + stroke: #4285F4; + } +} +@-webkit-keyframes dash { + 0% { + stroke-dashoffset: 187; + } + 50% { + stroke-dashoffset: 46.75; + transform: rotate(135deg); + } + 100% { + stroke-dashoffset: 187; + transform: rotate(450deg); + } +} +@keyframes dash { + 0% { + stroke-dashoffset: 187; + } + 50% { + stroke-dashoffset: 46.75; + transform: rotate(135deg); + } + 100% { + stroke-dashoffset: 187; + transform: rotate(450deg); + } +} \ No newline at end of file
diff --git a/augur/static/favicon/android-chrome-192x192.png b/augur/static/favicon/android-chrome-192x192.png new file mode 100644 index 0000000000000000000000000000000000000000..a85212db065d117d4d440952274b3d1f1a8f2080 GIT binary patch literal 5679
diff --git a/augur/static/favicon/android-chrome-512x512.png b/augur/static/favicon/android-chrome-512x512.png new file mode 100644 index 0000000000000000000000000000000000000000..ef273efb0baf8b550d0e2f466cd13128db443e5c GIT binary patch literal 12926
z8t?%+o5R3|T@GSt9l!&Uu%m05=!`Fb9ysDcb3UPDxp&QPb(9utqyl&^f(2{Oc&phOtr4se;j;_-v@J1HN?I&NWPfk1Eo)ekf|-_@ipAAY$v=Y>Y#2S zu8@i-eltSznDcV@(+#$XRr8*Kt=`bU7?*6Q3Q#RDCF89SMaOTwgunV;mMPiG2}^t? z=~{n*bqYk^v0xr1*+mQA*U#7p&9KebEI>ncFCn~uC9^$U{-~N1O7{um>y}SbM0|~B zSa+>3T3{jv$k6~fadzdvL0*)D>#JS#_mG3kBWMf9msUW=dbmI!WI#hT_Su*nXG8;H~NTBC^t|*KaRe*~It+>Rp-AIKb_h z%znToxZaoyhQ{f_Mi$HJN(7)7n_-#!4r)oHk+Lg1?jA_#%C`Fn3kBCk28s+HV? z_S{%Sgg_t{p_jG@=)C;ceOmi4^a9dM$sPxU4+nFLub0fp5m9L2pP`AH>~orb{l)zG z1%jHnmR)xRCWlUBEx@5SJyekD9L&9tJ3%FNcRX$=HC}Bp*3}RV};=N-cx3 ze%8ZLaJmRtN_bKJTW5Q*!QwwizEVa&XtZ4@QB(9X`!3Ht@~{rdeVM zRPn7608+xCZ~OicpRNqif4p*hn5}q}(qELt@m&QCi$)7XnM$ee<)+d{hdIBbFE(IF zROM`ue2)BWYdpFPnCz}~{^9SGs-{StxU1dTX6u@Wy8({P9+U^`1;Ax#53WD=zk7HG zO>}v?@`iXhB=-tlJ|=|Ni7y%mDp}RxAf4M=@d%HZikn}Sm@@n28W-#-U7(NOuGKRY zCs;ZsTF-;4h!lm{51&FV69k*tDLHK)v<;txC^*X4fG?B+wz(MhMFCLySA{Nj+O~*; zQE=uZ#Ha)2+^7n8a9wCdkEGzSra3Dc6#VuwJTeF`bNxr$cgZ?#E!ItTBk+P33GUb_ zZy^Scnf^wAj1G3mk}s;VsGCAJ1(HPb3NCFKF%gAu1%+JGi@J zVgkpx6vV_<(}&PQS$4`@94Qb0>1n1?$OLqS9f-{|09Ov%-wnDXUOM;6-zC;f&~jj4S2f;WvdG z{5mgqBlh&~BUhWZwXcoiyY3N$K!Srz%XU;Fv&e4xg4mq2g8qFC@M!;kzWDp_c#HRb z>r{Hc*3FkguH=+vIw@Q4bi7#p!eT99{q>eMH&6whA}SH5yo8HcVhk`#kWXGZ5zQG{ z|6Bi_UK%vwKQ(i>J?$Ssl^-RPv*mcfN{hWI5O@l1yy+*eS!f&lpH#rfCs(RL7y z4E~`EP?!pZOEiJIkGe`juvY<^D8fRyga^2!^!Ky4JOYQUXm7s+o;3a~6yOm3_05%$Kt@gn08^lU~)krw;u^gIOmfuW= z=A>Gv87-2L4e?dryAgO-_}m3RFhsFt?=R_=Z4*U-mHz3~%U`C;e>TA*VKkruuZO>V zFHt(Zw>_vo}-2Bh*b=&B>K@WB~9hk}v zY+A(QDTDG&5s?RtUp#e5ZG^38ohH0sm0!#TA5aG&`dKr5=8?dO7D<_(6mZU!7Hh~G z&5>UMt=||Uucw0T1Z{f5Y#=h>HI`JHC^TbjVBv=t6c_m9mIO--0W@Ke9m|^bZ{$G# z8tFPFEP<|f6$vCmMZ9- zw{ARzqgkBGRxB|))}bTX)Onfv`z%M_MpKFs+5V3>V;LS_f_3S6_U`2clU^FgD{)Ms z>xu;wWhui6EJ3bRh}HAZkwePfU^BxrPbqnD-VSW~0g4u$3b12{h)Nr{;M=(EU{E@V zohKLqfdX*&H^6xX|JT+y`glL*8C&z0@~$~bM0yRNdOz|nf$F*X{%z40_W>6zqfy+( zPq!rEUpW}eO|oB%z+@hvkqO5+P5p@wL>sSw`H<`RMZ>Mw}FkCgm3oVuwbdIE}rxAfR~u?n-gI^~?9 zy^P-+oVqk9YDEff<74kJApHaO>NI0k%Yn-SipcTpAFf&tlBsQ;7O?V)f4$R^HjF@V zr+}_g_wvMdUn$h)8So87kGr8rOiq*`Inc8|c?*|)ZUh3wv-x=Z=iKUgo!2ug=gh?M?ni=#;NA$P{ zP}3;dj8l)$kz6hoR!4yeH6(kFh0tw(4$%0o;}~$d7bx2r7hB$-q|nq{JsMJ$62YYl zTGBG+$XX+yL4z}QV?pnXuk*S?<}Tz7Qh=CCfU^xR-UX|FB9>T+zj}gZd37gD+W42_ zIP6RI0KXks-@zV#qc`9K-fa+%60DgVO?t^673vyk!)q}AaWfp;AxrbK@WSOhQ(5p)N(vn5x7UpZXD(cRuzu=BlA4yKa>+(mewZyBtgKThNzHqGb|>u! 
zGI4U>Ef^r5?|MKSd6p-wQWCa6OAmM0yFMad_iIEyU{ zJR|aHiUeJd!>O-?Iv*zaGQBqYUR`sLyTGa1zdBFM-NRGHG3~s&^|OQezhpym8X8;P z0(Q#0nYKV;(EpdYk^f&qCb0RcHafqgf9_)4N(Y)fj8g1?hF(Uuki3|d~Iz|@M zdrSiqv$S|-(xz5zlPQeP6fhe!VXDVyit=|V2i`C;<{{Q%;vLog=t|o3--BU#Q7IVR zUPsbf_dmlyCI5RkXdy8^kjTo-drjL<_W-;p-fx{80>qZoIwn*8svrVh<7Ka*Ws>F1WENYqUH z#U!3&0rR(P|DPq9rqG=BOPp>x2-=$RX%V^>EtJJ7>2OCHv_n2;o?@TUEK;#T#>Wrf zJbo<+kHh991?n)Zu_OF*uCbwn&a}QBJPQ%yz5nmX+8(u4e@E8NFLM`rQoSYwhIC^5 z?2SUo4XkFM0IV70KY@P0QoDA0^qEREjS)r(ES&DZ(3d?7Ka|2-X6#f1oX)z^P^U5n zkx0LKt(SMVNo;8#sug#PJXlY~JbHjn3X9orXrt&PHjqbq3QG<)?$gtE%$Lqa)y#sJ zKK%d8@#(HJ`5C((068}u&hPn zc**>>#-Fwq5Ng3D*#-wBF2rHw74V2CG%i|j>iB`Fe1s?idj?ET+~rsX$}oH{0dGk( z>i3MNc5BP~Tx?EE%;9~LMrnuN&$%(uMt=K7qDgGmvq6f)JH);#%J7OYGfe|SKxxoo zR~~nht*_v#5c$VGzj3E}g%SB-cURvg2R#zQlT6y4n~zh3Kl}Q?goyDcQZVd|>dq(B zfo(`2@pvaCzT)%hIe_2k9v#qilnm9J@g(5tQ}2g6AjEV6r?A&gJkGRk(~@E^8+NQS z{w8pd{Wu#Na_W3w%=(^_(U;L7w4g2{@@=*4x>O8aw=AA$gL&peSK+mZNcn))0oKsR zZCgHg`dpb{OSL7AwDmX-m=@CC!(8Tec*R*&{&9I>2N-z#0nIP`Hsiz7%&*t(?pHG1 z2su;Xhwq)e;<(gn>!X9}$4^d?N~ZUa7?{NA8^4+cBG0uS|9D7Rcq#x*@u25_D zXWjGMl}9hLZ|=VRu`p+iU8R{Q9p0HVV34F&xR=E{rW9WtKgRucne3GY^5JlE-^Q_x z4bDf%dEqbw=;{QCI(Sz1e53GR!w1hFrG|yQtba_r8+tPQ{dax> zb>FS-de!P4#Wb?$+R@#FpoN*QXbkoQcNM+HL1+ogJq2S6DZ&y5%TxD0)dggVCE{@W z^x0g`1;n!tZ!CzpHIm#_6qeju;c|!L37x@(h!OO_^iL&V{h!b*b}sre#Aeg2=?_PTG)cNw#6MvG^nE_5G}iT8kW##N zIvHLm{XVQBeW~SY>XOLwBgW?;RI}aaYVT^{t3Y|B_||l?D?$lF(!|Z%9nbl#A?>2h z4Bs~1AeAO%3V&L7U1grv5lF-6{>OUKsXHx_XIu5_*ON+Je@DR#bfg8Tw9M=GN$U4d z;#sTTFqno4OLkJLwMj)MtzC%xhs^UNfszbj7@sGR+IG;`eM2YxR+3z>v3bdM7=Mnq zPW>K2Y(Hphvh7JC{AU3C^8k3^7gH%MM&iN<&7behso6V4l3kBq;Igq|@0r z*>sMhWNB>}^}G3d7PzK@VRPbH3g^|I(v!8P&YN+h!=0x>41`wiqS} z>Q{AZk#w12c>c34FV7Tr4h51*BTlXu7+Oclu=u_>k3RR%v!Naxgtk21MboXYpxVWq zGyC^tdd4%R{KEK$j=BrqE{=~NaqqU==z@QAAg9waaA<#@bRK!SCsA;)#GX1}O}oBC go3}0FS*CEx-H5-YAY&iV8{~#|SlCgEwtHOsKdK7|d;kCd literal 0 HcmV?d00001 diff --git a/augur/static/favicon/apple-touch-icon.png b/augur/static/favicon/apple-touch-icon.png new file mode 100644 index 0000000000000000000000000000000000000000..c2dd7cb22c4fa3d21655e4bd914b3012d5a63e14 GIT binary patch literal 5236 zcmXw7cR1T^7Y<^t+Iy=_?20Y5LW@S#UX>!$R*DwY(Aos0DQZMpMeW_xs#(=Euf3|J zB2|0j)fiv;Uf1`>Z#~a>&huR7`kiy%_er(0v0!BqWC8#HtWZl+80FdW_hO)<+zmd{ zsRIDqdQel7oA-;iN)ScdQ=P*Xt&tA*99Z?3no`B}OigaEEK*T@ZLoCG1m18q{UMlN zlVnO;f5|vWF6sB!8#yW2{GR|r&H$&Zz3hezs8N-8Kqzq(AicoKVk&z{P4y=-`x~~3JulwpK`dpBXp2 za++hsIxqjsw~tEkzCENYFYC7z-}FjFrY_DX$$L=p(1>b0&0+mq?Vd3*LWnFvMXwlC7?DI#|{njh@?AxBg{@43#2Cn|ok1o$aS=1?9W zBFT6rS+R)L*p4Y)bhoIY{|wg-{vM_-iWa=S(F}M7z}28S1tbkL)s)qTY*3fSq|bt8A)WwKqD`F(cl}*YlJ_k!W0$DrxHt&x^jDF`{t^eh7_yp zg}D5^O$(v;Lqkw=3QkYZJLEGhp)OC9J8ua{=n>j_wv3NZsZ>d93e^i(3SM`O$cS9T zGvVXT)KNGwDt`t*5Fp>NGf!0ffar*!A5aC~@f$(sTBr^jCc`Vrzy%l^2Jm#E#;>01< zROBsnB9-d3JHMyMlt9u*p5_)$?O6XcTCF`3pXa*Di-w9h`O8##l|Ro&O7>M};?<=s z;_^k|J3227oqe&)mJ;zQw@;jZAJigkVHPwGixnmUm`mVE%vTscJrLde$%E+iox0<5%}JD8q~@%Ywu+H(MYf0wlu7s|)QId@Ub3{P?R($k25! 
zoLo4mG+*?kr+0ZC(lxYba@tY$rY``a8F+Q#)fdb#!O>|$n!{~Z#bky{MN_k+mhkQW z)=_7fsf8!P^I8`Y28KQT5Lqhn=L*c|gVL2Wcl_$1lu~3;nDr-l)ze29=DLe;=`Vy@ zYF^#1j7z8BYGplXcmJx7K`)=^zcM@SVy_zBl6Beua`Nj=zbHblI|pDdMDVM}TuPA@ zoSj5_Sb*oRpbcCryF6?CuFO50gx>%aagR*Pt83z9&o|h8I7>S|?3))xl3GUI(o%m} zG{EEar4J&@aQ18g%ZSi}gT9&cRV^^S{S`F(wbH25Ocq1vWv3jgS6RpOjmM|FjDTkC zwY#Ut;%N39e8=;4keLnZ2U+(ah%AfG`0BDL(6T!W&^h>eL|Na)rTvA0n5^m#Z(__Y zjZ%w0b$i0~2KO(L=sb^(P$i$mywdE)Wmj#^3-JdMLcA`Q0)O4!;Z#rn@0?$VFvU4P zBKc6kZDMc48o~!4%qzU2QfIIFmY)b+IXvG}v;G7{;QPsYUB?n6+l0JQzUK`%>e-sH z+s-yhO4G~iCX;e)|zwUv3blzh1-12Vu=NYV8N@@Vo_V2u5V3mU_3oOGhn#X z?2DB+MCF~pk3EE&aH65nVzOKFZaY~2r9W%j!ffB<8~l&bKmeNdkfgeh4gG?78sI{b$b3^x4T-T!9k`kCx{o1LyZ=jRMy< zOFf~q#?!SkRRqN;%F31PlV>d>);s2=*a|K1rcoL;Z2F<)uY#@j2_Bm@qe};MhwmG4 zhJzj`+mY(;&JDdeYHQDmvrY~cfBb`C7uY)E>uZqfkoJsScJded@hm@2Eg*>$P=Pj& z^6*}K83 zehZHHgJB4nC2sH5j?q~fq;D=H=KOBpx)AXGPSs;j?X%LJn|+YsMAvY1OHINyw(#ye zsyH)YZ`qO(Qv`mIOo-kn)*4{r)usFW-W@o&0K5gjkVIr6ZD*I_P={1fALAvS@mPM5 zV&0-zME%3fzt<4`9MA+z5h67Dt}&Ke^%~iz-KcWj@MoO=Q-#BGT4BxCct!aP*`o;@ zUo<{FiXioV`+Silu zU6x=3SU^g2UnLunt0e{cu0=5QYu>mz1V4oFDn)jDysIOAh<*cWkB61wz%PuXs!Z&6 z2vaNJ;nUlNrJlUHgHHc=N&!W71~R=#m$w$FADgG0h3g+R9iGZCQ@sEA+#XXn)PQnn zp7U@px5M&5XJX;oe_cC3<#nz#+@FdS4>P zOf7dkWd|N~1b;_R?V-Lb4euYx#Vt>+@PJreGsJ9uajO`n?E7&Ze#FNBkywUc#9D0g zIc& zHsR>suL7}U4|6G=!WHEAVNAh*7 zIjf%2MnsV1!X-t9e?|EyRe2wAwk(M4$T+$!R5a0I|R>4o1t+qy!_Kd z&fWsyV_K)FV)l!>R*G`FoAmFQ>*U~zdN*SAq?2m-=9P6#tnw7GpH^;3MbD@a!*pCf zh!;I0=;Wt3!awehIfGiXe3GM|b9lxq!Yhw9K-}pkzd4C&Pf=5lbl#jt-l(HPYGPRN z@B`m+oKIY<8M%R3po-a9xdnThPrmiN=QVj&H0ogDWz}$)MCsH{9)311`EtjUT!#*} zroguq_iAO(%WGFI-2UG;dV;~D2t1jUk3WiXLJhS#g0OEL?k^z-(v1yyU|@VacAFUH zb}e*NJLwnp?(_MIhc2M{;#LT~=bR}H_)6{G_I3>gq=NV4VGB|F=VlICP3OC9hdNFh z9fD_?)m!gp+)^rD!EdEGj>|JAr@#U>vkl_Qibj)zDFjSbOrB|3SO_^!Vr$R(!ik$ScJnb@jzsh_PLoqf>Dl% z*uD&%Zutqn9;bEP$tL!t#BCDhMVmCC+d z*Ml|->AJM|)ch2%HuAxtJ zRHzfZpda&kRLUiCgVEnW765?v;z ziv%A4F>|!LHNKm{J@0%#CEp6-3DDf^fq7;cITZD9)g)&Mwmu}gAk3M9&X2*3bT#)F z!8>OTvBJ>6L*8_`tNt2_pqk`J-OiW#!KoqFf_k z@k-BCTC~oz8kfPZb^l}f7I>lf13#dKT)tw&m?SX_-h$iS4zcVmZ?}^o8k}BeD&6pR zCo}xCI&M)N%pxZE<|^msP3k|h4kP}P!-l3NJ( zk175E2W|M(5zF$&pgNuaejrQJHzky)#n?fcSEb#FJAnJ}Mk_w!(!8>vvUa7Ac%5~9 z?XQxA{BE|A)m*BjD+>5&DD%yh9#Nf90cT#1eq8`_|MBgvBX2ZuHBAMts8X<${+k$5MU);6AF1t z4u8|OVZ0r60=jsO#q|xkCjXkE5~nZ?V%Ugz%c%;5760LGp0N6z zS@b6>A1%f!Me5J)oI3v=6*qpw!454PuHQJ@xwNUolKxvutO3`^$$u==h;-Ek>z!M4 z>BJAt?KXy%M?PFxQi{AL`<|!)6V&FhYMZX?iA0=cO;^3P%PN>J%xV7a!>cPeU3=xD z3Z0gLrs5zy{$GKER50**Ss@!g99k4n6a`7o+Q~=X8~q;l?U|fz-$b2Urcc(GZ1kXy|g24pcRPy7tQaskSb;lr{I-{&F zb6_>^>|mMXQpxE0i-5)xk@gK~fu55RCy+aqcP^x`0Y~8YGxtvkN;dP+SQLiDZ#Vl?BKvU6P{B^3jMq+6BKY;w9ci`uoJCa(W_ z_@|`O^P)%P{x2&fdCMGWffziESJ4>IuRcTikOOcZw8%CzLXrIxt*bV4Jxe-L0x-_n{&>DEZ&CqFk;Lto z;>n55Tx2{gAcaQ9b&AdEv+i2$63F0E%BZ~F)rmjZ(CTkG#p8Zd8MKLQ-XIp56cC>$ zyeUtP>&*q*iY}l7*h~K26;{{%{y%@%FF4DAOd5b7RU)02$E*lzIz{PPv!{#(Ct2@T z7PQ~zNz95^$mEEUo~*T=mmue$ns(1(!T9>ri_$dYhL(ehDWCCo$Z$*5>?kmrq+ zzSMSXWj;C-A6GA}n|z|>PR`dolGE7{DWx8i&R;>D;%L3r)DBLTV{`zQ(KvuAO|Maf zaE_M~h9qn`ox!tKOU@C?(+FFoh>GTOpC(@{^w0IHx9jxDAf(6K)0i4MFJ~6|58`dG z9u7)xB3CU+(z$tAT(3T-SW7IYIm&3@l!5Y*)a-~-3s$c#iMX2GT-RYG*F#%ot`DIx zzGN(#m@u?w!jgCGJUM4vTR&I@GLSvu6OaOB1yR3A1>U8hDyB~Vj45qqSo+NW2pA#) zP%^xzr%eSg%*by}QIxhVN6z^`|J zV@S`B>J*e{3#bL`G9!hpPhL^^1IjcKzXEsxv&p!BhIaZ;8b&{rAi=_B{%k7{q%Ycy gy-W=lw{}5e_3uu+?JsE@rJD$Vn%S7XgLozV57fx#(EtDd literal 0 HcmV?d00001 diff --git a/augur/static/favicon/favicon-16x16.png b/augur/static/favicon/favicon-16x16.png new file mode 100644 index 0000000000000000000000000000000000000000..23fca33e803e2422d11097afde71c48ca1813cb8 GIT binary patch 
literal 389 zcmV;00eb$4P)fiy0{Sr-54|wz5$Dp#yArF1Pq9d z5d8q13Goy3?|?)}y;#EVhRa=kclXbGL_|J2ZY+Z^kwJ)zm9XQ+MC21YZfuWc*nIn3 zsqE1V?6@(>AWT$;jAj=`v#Ug85GEvp@Xt-FKL#;_m|^wDDga7AuhpS#??T(!fnKYx z2H3+Xw0r>@7uVQ0zk-%8z#dMs14b(Xv4Hjb0qf}<#0;YqWe0@kLzK$D)<+PYpAE43 zllhIe_dhSXx%&=t@;{*0>L4P>lQZa*D)dSf^7s@)lnOHi%)=`}l*-6mHkV3xKD2ys zDa;gLv?CA^%*WT%f6a#%5D|=aqy~iNgZB@WoyU6zOJOd6WYOEXMh9kZpajg`0J%m7 ji`kJun5;ic>yGmmKqEVxY)Vm900000NkvXXu0mjfc>}1B literal 0 HcmV?d00001 diff --git a/augur/static/favicon/favicon-32x32.png b/augur/static/favicon/favicon-32x32.png new file mode 100644 index 0000000000000000000000000000000000000000..5803b2272a8f242010fdb0cf05debefdc368ed3a GIT binary patch literal 649 zcmV;40(Sk0P))v4@WO7xd?$P4Y@-@?Kfez!&mfLVlmTd=l~l=xkE}Pz8V{0JH%B zBTBnlquQb0KPF#p8$>fmN8hGK`-&n$H4Hu#c z0MMjXCAQ*js8zx4W+i{%$WVSkhUMM0-rIDo4s27TZ0^mRZ$)jTggZXwF z43kI4@d41%THh;1OKb4}@Olf$%q#)`YN-rrm!~`XGqcF+t>^*R&Y^eL`qDD$51)8r zwsROgKq6o83Yu@fqq+Ch8z=IGXaPj|$n^zpo?M$(mT@2eT3Yv|d%o$9>FE<1Yl3?5vEWjLHC_e2E8-`rWv{s3fB@Kq4PpfNj4QWL!L708u_|I(z04 z^Lh)lD_5muR05zYzI>xkU;X#I(c-EvP>*I2jli8<&z%3ZO^6$tmPtPDAbHpPQ&nTnd#@3P6-ke_RSJ?nYl) z4wW$!phrGUL-XLLG=TZ;N2rXU0HS>4<{jzcZpe+>!5^+L08u{U<+d^aeX$iNV;}&p zHIQWIkfhQ$vT$#po`Ki?;K;4JogdfiT;DZBfT91E54|xNdTZ9(oZ1mKr?!LCs`NFd jF2pXx)1IG#(4PMT9}n*>Os?Fo00000NkvXXu0mjfgeWVW literal 0 HcmV?d00001 diff --git a/augur/static/favicon/favicon.ico b/augur/static/favicon/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..c0f9decc1e0e8cbef8e25a1e2e19ec14f1ef3c9d GIT binary patch literal 15406 zcmeHOX^a#_6z(~8c6Me_5P{{AMOHviKmrT0BFLpH2=c=izz7%*5YQMof~*oiqOyPy z2q+*d5?Hwp#c;VQ!g3f5YCO>pK*QmJ{$0f_-&Z|vyQZgmrl)82ATr5IbyZir@2l!p zRj-cGG>_)f8aC7@%+)6NH7#G$wA@@h-m#IUt)aAT-6HSXYg%~|P3uT`s0fwN(9sB2;|{Mt9?s8o<@ko;prAU58~xwOcTOb_up%5vh(iT9rJ~PG zSzrR2q-y(;tXjv%w*WSY{N`#M&_3NddPVzOuNc_d=jz*>CmjQN`J@i$GJoF)zO-{Z ztN3^xGfW@b!Ee=E`?11Yj^it4%l4J}p^MWwOoV^nUD-ae{JlNw=T6pV4h!y*HZIFU zm-&4n{6CLpi_Uqv4v$9@CwIioy=I*p|8lE7C!_741G@Mfhl%i!uXvi#hEq**EbFlf zUl^;;)xd`it2|Ed&wLyfSwVd)Y|+#sE+37phgr9rBX9P|c1Kz0Fy}$s2|n_vcG$R~ zCYs*li7Uv1dPA)2j=4C+SM8AXIt{JtE?Iol4uQM*&7de-8?uzrg}R@Je0Fx1EI#t9 zcKGV^0n6`+d~+rTs@vYq?%d$3cG$74$Fe`6oo*w)3U!ZXcW&@eMzuq$@49!DFuHc03*PkTG6c@nc*Lo!UR=SM4x6q>0}@Gsc47KFt>4EWOVqvOCt7EI!H= zPc_EFWiK1!+LBpD`U1+s81X_k_^KWD?rO|QJ!x&{)b5y*8+?=nM1eu&;uLeHs{c( z-7$|;@KrkmzLW8eaXCx{A37$Frgbd&rw}}|UZ{-m`~jjcaP z_lnr{Cz-HZ1D8-(8|!ai0E^e(^!We%VV{JyXti-94)#eXm)yR6fWloQHT}I}##zFQ zmxlH96_VW~BS>f$I_j54u@aKXU_cY_CG)#@X}{N3F9cj}Z|4!gOj*ajiDxMZIO7*4 zLHz2itvzD*@}M}iH!Myc%&u4R%f7JW)=6JG{~O_q?{FVUWd~~WpFXl)6FQwcY9rsz z)V7g2R1)vuBwO%ZHqsiaHt^Gd0UL?flI*LnExSGAbl|012W$zPsqN9Ht8j+$!=CzY zJKPTFo9NTvPD0Nt(hOJmiHG0Y45~XW3kRuLvX?#f{JzS6Kfouzpr|{jmIDR zJ{V8oi$btQocVM>{Bguux0`wZWBQW;(IBj200(P+t1}qrPliA8V{ZfBiKTlduRJGm z!_}$`>^ZXUU5zp88T+mn?s*O_vpAicn-P1Qbu&nZPnj8LC2x}6pM^*l)9NnC?j= zBMYZz%5yB{ju3YMoV0~o{%D8&tFvU^WnA<uVbdnL?2 zT_JSh{)E|9tm3Kg$DID+_zdCm8Fq_vxPfFxLvRnof#B>^tvBvWK<6cgK1nM4VV`^6 z&Wf6k!DZ9ppqz6s?<5k;-75bZvo)#HiS6Tiq{1KNFVY$EJ>8>iXyhQDxa{Cw70wEA z-$Ui!jqVvhraKgZ<)6nY|t0Y-#v_ zXxn)yj-SkY&0)IuoBiq>I^QneXm3L&w}<2zHMc$dV+R!`B=fqEDzUDAp{Gr#% zuU@Czg=FmYZ$yvzPW)spYYx-NAHMFhqoVz4?8j6ncS0~{_s@@>FUCEb*iSa|=6E{! 
zLnrRfVol7p(dY(El$99$_~4q5T$k{BjJ4$s{AA{94%5va{a~o#SNq8~nlmLJ@8w`l zo;@iLwLTxx$NKJ(nWtSm-TZ;G&*WF*j@l{O7r}hcucu)<@RPSX^pk6ozx1m+8$7Yk zRkk)u9HV>KA!U5(L3y0|$+gK}`qks2{py)d1q6icUi!(D$ElxOoBYuS_N>Sf@Z*?o zC)y)!M`t1&mwqzkL0&s={}B= z_Dz(hCizRhI>(p`n19Qr8hxMS{A9a&)Fgjk&ZqtA8)!|aO3{Sm?_$&y|6liqqs}3) z$Cb{rK~wu*>*q(8aK4gg&(!W))*uEhRENr3T3cKTXOQSekvS3P z*M%e@k`j`CPzT%*@KD(EjD@GM+a*t*3pX5*kB^Unv+ETP z`wQ-l3T`)?o=+d!hLAYIo<3pX8$Z>5GjhjB;P2UtakGBcV((jb7I`+XG>P9qQ>)dU zxB0aRd~b{N8V(5e&Pqf$L_Zdgdfe7CG`#E(k8Mz5im$zXDg0dzA@m!|p`H)eOWT=k zH8eSPGB&8waloPaP>^zn(H^z_Mi1n)-(ZCjpq6S^dJ3Y#t+ zM0yePR>YvQB)-NXg!r^dQrmP_c4G%Ujf$fJ1KF&~L4bE7}LLI@EJo_c1aT1f!8EBp>+G8GsGFjAju$N-kcbO3ti$pYlz2D{7=a?uBCZs_+hUMxg ztqQazsYrI(@D8;c!_sX?yY3%A=tqkHaX@iQ%w~IE{boX8Y8ZCq3f6eXb}>Ydi`_M5 z-sx*UwcF*GPS5__YNmZ>Lw9|s6B643gZ`EzUab1%PK?$0(h40?NqsX+8BmCrUvjxa zh`xLVU5=P{B2I5ooWr0WdYYNnqo{|9f9yEyJjirdi&W(zA%f7CcsHHz*@8=3v=5+< z2KE`L-(*CY5uwu0mc*7eFI4-+74tD_KK3w$8S!;>Igo5ekL*n&&MVBw zKw_wq12aLiJv@fKq%}CwpKi$KLK=oV#zUcH30Q?V2X?!()YXp5Yfi=%(dswTsm?{i z`tf^C57|e9GQH=i?&w!EnOe`qi9O6*Egk)JSTm{WiFCKva9~8b=vyb zW_wnMzE*FPxMnhxcH(xl35v~V(Ad4n((P{mjy74wsbG~(qNb6ZK0C+H2X^<0A+$d$ zBm4Ucv8=`SP91YoYuP~)N}bj89a&}`YTvSgvKw+2Pt+Vg{1Cb4ZCj!?N)>b^`<^-~ z_-fpw;6{$lTuhTl?G<)}W;^jK<$g5UbyKl&tgM($@4H;Cn3r8Csk5PL(w3X$)sG=0)oB}K*k3|_w%2EHrtt#kh%>3P0oQPF{XUcclR3iwXYy1>5a@Sngecy93q9$Yf_Nw)E+BPg*YJ5xm z26Uja=XIuI+lv3tp6s<#CDIn{o!O_D5a0QQ*Q=gX0s6V(Q|y*EdsHMonSU^{>u~zj zLactJ$)TMznpHWZ(lPsxE{R|4i{8Eo5lTVmg0>5yW1n!DqpJEP;{{Vhp{&;-cI}@& zWMY2UyI9lo6I22s?*+^s*o&hF$#7U+*G7o#6W2-BknSwyiCqiFCSxYv{HM)VaZuU+$&{6F;tI zTnh3ilJd|G4L&jMG;CKdgC0Nij7fcQ z`*{|Aq+iTr)s1f6tZ#q+B0}5A0j#UvhFLWvRwkM+)z@tX+(sKM+sFpn6kR^HqG=*?|XNopTT+ zX5^|f^rx15mNWXAp1m_dAGyq~niVJ)&kd`7o~m-gJdTPG+rIkaT@=4A}xE>NTqJk#)YH=$fq0)NhC~?+_K+zS*@xDKxE>n5Q~rQ5AHG z97Dft@H4dL*>WL5!-kaK8`Fwqr=3TajhWvGV!4RNElbCaQUuZ^s^kzV8;0Y#zh6|2 zw#{s4N1;RUWG-0&^~f0&A~!;pEnzk`=-2A7FVQT4`rOzYmoWcSCq_O(-OG&B%7ntN z(B6F8Yf~-}nzqV~WFwC2l-_hA=`tY$4w-~ggPcf8j5&Vi*nO;MtqAGG3tZ4S+r|Qk z7*>?bH||EWc5^+dzhJKwVe40($}E+SB!VdF7M;zE2oqcR;{9g1L1F_bC$+>4Bd6=P zVAuIJHHHNx8yFppQHUAfMO!4x-^}DFyBWS@?5Lc^+Dh`>_T%K(6mqvE%Eo;5kEwiW zPqYTlP&cdjjI-?zP#Es~}GaouL3g^R4QU>M%u@9%)C-f6# zqJkwD1eWUDqBb8O+IrnnboM36uC)6usUw6XXMEK2Yc7JftMl5#)Q>ceAS8VZPnu7S zIVL+Ri?+r65x5KcB&v+RN{!Jwj(m5Vim(P4Qa7jh_?%z!q979zK^PRC`F7{S{8*~| zIsk0m8zX@G8WZ^^+ctJN9Hz&`P=^*y{IcQ(nUqp*orx>YQ#qEV$trD4@0YaZK|zlce3)NFkjeRF-X|-TSenEn25_k)9LE>3xfY<8S0xx z`A;StlU;R<$lly-kNz-YP*d`_{ZbZfD8?pC>GFd4#+;R9=DbGqg&p+mL{yc(by#@p zv9$_x^CSe{bnoTl?7`)@38hWOYb)la{uC|7yIio$*13_I0cmEYXK>I$38{a7d^-h` zQp@Qy>qxral}Jp@FF$USAr2_nsBFTldv|Jj;S&6V4zt1v4_0IqN3yBiRw>-eFDH~x zyar5uxteD+XG)vMH8UQW?zPd!{9pmXSdYll7c_^~*U=Y9v)H|SVjALGM;2+U{xQQ>|Yp*I#^ z|54Egv*gW||Gg!}fiz?dX%hmD8p#Xrn+tveLH~Z6E>2%pJ9%lA&hy0*H2lnXer4)- zx0t7z<|We_+U6F51{Xgf2)&@HAo#5dzs-`V-GcCYf)Ad<@9qEn{iEv`t%k`G2uXcR z#wh~>x_}Ou6zUd!*iI6*j+3U>8-1(L8qX>hWo|kM9DO-XV~2+t!hG6d7^KZ_)!)z< z-K|e` zc%KvAm)Jd`?l$R5X>T(xxSTLy7Q>H^4=Gl=)sy9EJn0^Ef?f{Is6MJ(mX;j9dfSOywBh*{U!#Jc6tfswG-K!T# zr+O$x`YiTN(d}ma?zf*d$jKttx0NL_HSxf@cPzuQHP4-^t?SHKt5TmhNg0w-joTB~ zQr!#3-2lhkkU5RlD1Ogw6ZSPSVu`N5>Let}9%-U}gFzV|tUze7N%B%VXA?w1Elbl` zBP3E*J}?6ZPk`zuo!?8pIjTog_n#)zv>9zA>=)m67_@rzjnhc6EXUuaV_35(G=&OTVJ_vZwwb@r*Fv z1JJwE4p~$wY;DUm{u*(B`&W*>PrbEYoDZDSgF4c4y645YBw2G!TDg0FMTFR%Gn?&S za5pgl_Pv#M4KJDX#^JV!YZ%RoE4o$HKY&vp?Y0Ut(^p_4$E(es>q=wDfi)B<_Wdu( z_^Vf&xgEe^T{c>4+G#wZo^OA7h*Lwawo7DF{GPz80^FxhAr#k9k<&1HNtbDkc# zZ7bE|n^3v9bAw*THsJqnpl464EZu9Mqoab({)8C7yvV+y2JWha0hOkydVo|$)<_W%7t9&XoA<1;4@b?;Z*txjXCQ+bf4vN)UdOIiDF%x zjz7CZjE)%k+c(Hq6ds@TYIm5b+3^KZ{vX^Qqv#TMt@8BUrWCAZSinEbgYL!ODkkH` 
zNcH55q?qq3wJ(dw4d1_sgN1xiXF^}LgpAbW4Y%SmWLP;%dcKQ+dbwYMjW+&n!JoP> zYtbLIa=k9E|6!$FrYSJYi9w+Ogu4Zfrz{is&dsv^YuI+vxDx4wA&IPi(S;F`{IbN3 zq>c-`J$~@*jrPP2w}TR|Ru1>FFb9IwRsqi_qqfPUGcm=JzCYt~hIz!k6%O%O=H3UQ zAP~Cm1)>B8sNMN*y;&y)`oG)rZgSN4k-&l8$-Ii93r25tt$e$1zyEE{NaJ>HFh5n0 zlQGCieo)SB{+qnI>)6ZI;kLwH@(?c@O!Nfautbcg-*+43D*67Mad!8kuuX6=IZ4I! zWw5N-QPrT*oA*szJ-n$+aLNpzlK7Q8c$SE};T_;Myp^@}&=J+3?Vu7RfH4wEk6q28 zj^5-{nekM+TX*iZqlSISO~M;K0QtQ~-+EFwDG^SkqNi%3az?_ECjB03zgjC{Bb`JV zKhIN5EL?v+r>~5xaz4Js=P>QEF?^;+I4A~`irQ=ZOy6N- z^26A9Qp;H^gn(q_il~{@l6ZiAxyec_S(kB;?@2s%1wa#bND{eDUNub| zAb`U_=|j#ui!qR*H1o^L>!V9I-(OQH7u|WtqeKB7+QCD%+aom)`dX5d8Ua7DwyLy> z1iqb6+Vy(trdy>j&d@(*H`;``g<0u^iV2xruYI3$JO}|gGbgJ$!I}UMZ+=zPvat2G zkaz#2-<_tt{!68zn9CcO(2e|{;qdIR-W8KAuL4UdpIm@&u1xRaGAVbjhApzD@nE%_ zpa^|MzBrZ1H^o`!@E`kIlDQuQizD$tmxCT7>iF+h8Fdvvk^+pnO5lTa+P2-w;@N8wgNGedFy{v?rA5VXFsvPc9+5?3&SG1x#^Eyi**eY@N=4Zm5Zi0%2of#^q&#+c(G|K2L_d0 zd`;bQwX~}*=LY-xq(fF|Wib_^yEnBEns$RQ>Mvesy{P0`AV*WRq872v--lfaENF91 zHklA~KogII^zJZ*)AONlkv*+=bC4#B6a#^D*^c-=k%x55=?uwY5riSvgK#b9>KVcM-#BsEFFU(05WE8+9awNG zH@!v8r!;X_VL=2LC#PAWH$6}RA@ZX*ddI@x@W;cfx?N?W?_at1h7#Q-v;-YBEG|w| zaNTt*q?U~M$#B?&U5NhYdx2y7;qbwT@DW5CA?RA;lAM$aaU71v$hulW~m(tT`2+AtFED+T>|H zfD}Rw98NpvRPaLYh!;0Fa@WY;*1PD`5SuW5VMh&z9T@b?@h#Qque|qpr`f&eKaXBh zN8934NPVn%jZF#e?YvpJ{R>;af)4>*2tON1cjtr>**DcFeTgYb zAs#FkaLO(K@J)BdyK7wXsZuigO0j2c8Tke{B)x*=w{J-Cx+eQe&t9`-nJ8(!BKGUw z99a(J{)8;{>+ii#gMxA>aWIZz#)p2ygT=K>adIE}I@jR9k;47pG3c3m&PYvX0{LFp z4K>Z5Q3^J4>kgLTsm^Nv<}z!=Ux=yIPrNyNle1*tkKd!F_1syd`nMmq;C2i;cV3Q8 z5y;5$9NW}w|Km=9M~n$4fRJbP^7&5{S}Bu=OxABWC`#o&MiO9Vq(Nv=WH6Um)+bY`sj$S6;e7IWSr(%FN8sRc&S4 zxDQ+ya7Td;eKES!iWgEpFBA@!G_~9OMOeUcxLDAeNYKx4Ue+u5-fC=d4hv>Hjv8;a zVUWCiF?GwUDocpKd5szoIx%vPGz9!xE>{z2v>MP(<=p9=yvBE0n2+(`N{35!-?f?h z&*8E|XA5r3kNdQs>o*xza8^Ts7oiqBC2jCjW zn`M|^I&z@P8^D<`e0ZLR8tX+)RUgnhp%%hr;D?dZTgYKBkoMUW>JkACC2Lb| z0p8dq?ADz-SMsA(0#X?#J?;(kq=IF4#LClWf7b*!UnwbzVTNaq5y8_!p3cbHimUpE zztpUU*$)sEop&Me&7E$?E*ApJ2misL@B(l@WG5R2w_NUT%w8Ej1#&nCM$Vbzc3|im zoyuAAUC!Hb&GzIXJUpBOpsMTcUK{tU38iqNd+WLNNX9Trc@`uAiq!UJ=;GxkDmNYn zJBon&gWNCm4k9=R%hP4#Wc%E6CuG9GC%@~36Gna`54PmOP0ENLcdDF4DdR5cA8SY& zNaN)zp=jU58u0+!t$Lndhb4cmZ-B@yj^|fhEQDO~0Je6d6__HlLDdIaT_qM6y8p+8 zZhLX9$-ph*MG)Cq5>&83%?n#E^FwJU>bD}eWluFz)9T9w5b%K$!o_iWS6#s+c~MiEg>#*pB9nhfKwTMg9xIZLy z>?D{6N{0L)oihg=;P&PArMFa-i*JHAKeq|PXuUd40T+qgiR``rNp)vc*Rz=)+RmRP z$!#?~;O$|(lT6*r?--oL4m>c-K3)ny(!D9^sBWLdh<)kn#N2HR$;!l4@T;5zzEj>_k&spYSAqNYI#~n zym|{i)aage;{4Bm{|2idq>iWH(zzd4z>LRavGARd7=8f^`f5O9-Vee07tGz%g4bAt zShk%7oj&=KqiIjI3O__0P-}LFEsh$eL745*bc`_g4Sr|_*bcA%24)DdAmy}`;cmu| zxoUtxXND+v4DiayGFR0&d)_VWjXQ(h5n$LG7BAvJTMeLQD?`Va&6}7~evMk|W0*P6 z9~{7lfnNASJQ%8BY1|K>O#Vf&1y4|m#C+vuLMvxpa1=b8vEc^gJn)jr-UIa==AG!r zV_5i}A=?K8Ylw`fzdP1K_ybZ6WW1s!%(u1^CF=`5AO!sI7j`;{-l@WY$zbW9VHuxf zD*G^pqeeUigEDfuP0Q|!327vnEcWl|bOD=c;6v_l<0_&iQ$OYa4uvOwf5Du6d?6g{ zNQni-->c&b*AH+@QDH&@CqV>?u(7kaicI)Mztn8JJ_t`ZrGwq&^gV**B zQg=3w{!%PuB?!pH(Ca=ww+=t9@bUolcMat8!}`y_gA1CJPWMB&g~U^?QkZWOCi!o+e$XdZ zN`44gY&+al7c9DT8-FlT_0;HeI}X9`uqKi|oa4qm#|Y;ab0(Aw+@q5;UawSz4f7$+ zk2ffaJ#S9guJU!;C;!mdVzURoXx(kkaDglYCh*@8Q~I_|G42zg0N?KKxBJPD9<{hr z+LFAU>Wt!Gc@3hV>%h*aUI^M;j_)q+MuL$eWE^Fjk(&C3FnrqxSoeR&zA+h;+0-^% zfs2Fq2C{(6CA^T`7Y@YgP)6O7HysHV!~mSzSz_`d)h|4nP`dbn+rkQedCz+q zw?7;Cj~84g%eT*fK85@Nn@aM;?fdz@#ww17XDGd zJ_`utAg$2-c1nEcMjki_6aU%!3=OpK30*G^po=n$NI5_EKT;D%!`6;7qTRbcG!)Sb z6#D#bPw-zXg8co{PBd(>R%~t(I1hQrgZiYPL>&)BA2qgp(okglc)ZRsaM`wna6}6= zDYcK(tkT__Dd8x65 zKd@^)dea_2#BKl<6iw75)5f8XN zO}>mYZnef*{}v07_R`jTBvI8`pWljcqsXD5d#Bgc6BH<_!ZnRMb~g(lexD8P`GL0S 
zj9ua{8L7|JIhQSt?*c$JcgDzKy|kAamHa82U-dvCu}7NuH#!><9v9irdarrkSGFtoZ`$+PknK$vvWp!fC*fR<@v;iW zj!4-_Gk@1Y`xvgqqVn9pkqGNB%wf3QW|U&r1(bY~1?9Yop>yNOm`wtV;w_|+uV0@n zxWEt9}uM!oRUjjtOn-*xZhVbHt+@$L!-x@Vz4hlC(g+2HX@V=e24nHSPN=L^wl4@0u+f9h zPY8cuW723iuRxDd!0p77xnaFMcdz6T*Kzm76zkr>QBt zZwrYsy-bL1s&EPEShCGBaTW>kJKb4s_l$Kj+5MR9?md`Mx)&_LhLa z_T?%&CH^y^p1nT(I>P(j!$P0~QZjwsDHHqzYYtT?P3}C@Lih$gL>gQYm%_odEY>EX zd;7VpPt%>sFmeVpiBF8Ew=c=xdOUkC%%q^sHR;}dJ8yopAJjviVdgeNew^awdg_L! zg!ch%I&WV^b)tTA6c2e-V|;tR-643jA%K$KI&GG&aS04Y9-_!D#;&K~;=L7cKgl!d zxBLxoY`ReSNMGtq=8jbZW+>rk72}sGRAVv1|1Bpl9Q7g}+W*0mk`EU}SC=+P+T9xA z36^HQv=JjNJMLvV{kCGt?LV@`3|kfiL{xrt0$<74@g!W-!woJwsGIy`DwRD|0dfb= zgb5jj0OVN+kJsh>*tozU0rc%8N4_7F<=7yYdBdK`v=x3+v9&^ov|J_q(u~ZIaPn_P zlGBRwb3m;|YYV&c8lg$znE?pPD6JQ~#j@nxnV@{AK%JDCp%Etdf8}`Qkn3x%tFfL& z?)kRnfSVX1v-W3niUF<{)G2VdJ;qZ4)oSg9@(Cr*Fz5qV$hvQ;x?Bm{&K|v&HN}h{ zhVbZnOn<6oDdQ53+XTk^swq%{UVr;vXzE&gF7^~a7K6mSHlEK~dhv`-3_~_gSkYij zDnENA_7fS`Q>Od#AfvA#`OEj?%Z8B6KyetbT0e9(n3Rq3X04V&QpjjPTAc}kLJbNw zIOSSqN{f7!C-v~j{$vjpWDY!E{MNgu0y)^dzQX&BG{WP*nqj7mxPGTX!KUY!{$1!W zY3`HO)hbHdh3GpQ@H6X;XqGfW!^1U2TO`5&wH$z zR?~2Wy<*{p1Ah&--Dnl%DRT^iE+(6>cI5-=TprTyY8w0p2dFVWui@47Awokn$>`@I z?y35?T!@@jv7$w>rN5m#k{+>FB=)XWDmi>&oo#1#h33w#rT>qFI!y4>f@T%;bzrcd&8;%WChCo_jI z`sVl{FD3#r3u31}^Ds#~0=E^2vo7iIS=t9@1~mVI`Tyvffs4NXN0SYOSRP?rV2kxU z0!=#k1IB0lnzI$hMjt9rT5%HP+Y!3kov?g=V(1||;VA4(aM+<9H~*+#hJ~Q=uRAD* zJ}HG&ys1pFLT)sfO@+ zOkexMj#Z?@DUNAj^DI=CPsvakH^Um~iP?E*R>d(mJx?q`kFp{9 z7!?!e3)}XHA&JhQzy0ftS>Z0~0eeCnljbDVV&=2e1_zS<^-Z<4XT=*QEl5_qx)Va^ zbW@8Sz5zW)W(#A``9at(rfMVwaR&lkB)Qb_Ho#5)cKaKSRvN+R-pUdX?vE*~qPuy5_y&GO| zhjVXWN$uv2_e5(U#ywIE#iOch>{w@*WHtX6Pb~X6Ta2$Qi3^)&tL`0{1ETt166lWG zCPkmoJt|%m$fZFP)mf2_QtW@Wev*?Tty%G>29I`UY(svDL3+i^gSvj_5xSAPSm)s! zHD)D%OmedP+oS|=u-&nV>V+C?u_dXs%j+I;Hy{~f#d2>9ZZYvc@^$SVLQT2ff8NeM z_P3_pVr2vNzCJBF%m%s9Ni)kR0_9hzN zSXQ+pkFu^)=;5hF#N*}AQhD?^J+gA=4y+;%BAkczy@l2t=IN$!c$?5xdWMbVp9xM4 z=xeWYr)xfo>uug&+H9j#3+3PqbkkH`xgydz_fb97>&~Op z^Td_P68CgjWMuF2t_FKS9Bb6(li2lj5TD;+d={FPkb`lqmKOa-cl$AlZs>2ia0qJ} zrh2)8IFL`v7yMwxXyL`FzqI?-pd8JP#Qn#;c2tbq&K_7QaTZO}A}x$BOwzO1I8i<} zPGYQ(<^IvLQ_t|>E(ck;qSL&f>2CkfGLHIsuTRj?srLbvq#bK(_iix}I(m-%2qfOy zqhyGKj@(jDYAm4CbjJJf6V+4toog|8;^KuT+U1&|Qs{H$Gvsa(DvPn%!0n}}MqlRX z#L=Dxr-?|^)VMGq)X?GQIcYCuwv?!Tt^O-?|2N+XGA>$LRKsr6wLJddR@|mnJ+F3( ztzF3q&0vNW@-Knbj02m33CKQqiwpJe3QQi{n%1|ZfYp~xYh=Ss^$@p>xEn+WjImb+N5er@Fz7&1qU&jX3O-Ut^p$}(^U;EliWlrJNZo$=s zm6tX`jM8V)6Fk+2)Yp@t=%yW~C__rSSE{)DZIAPyc=hpU(uiyaO;s#L{hGJzMzEwB zaCzOc`qaGau))FcQ{A1u!!voAxUFf-?%AUf3G!o)VsT=+YhN&?i~V^kVYq1gG%M61 zu8#b~pu=aUzF>CH=hQo1t5XOripM4{-qFXdIK(S%|0uP52pg7lHxM%W7j-1U%>>7Pdu59w#mHsO5Y6-6f{sYRXAtmQf5tJOL)*0iSFyi=-rSErx#6X-+Lm8{z2b4z0SXIuDuY3hazuMQpt zFlT<%w7`Y&tw9Up!u=sa_d4-p`-NjwxTyQnh6`U6v5l*N6|OnkiEmDa+W6b+W`FQ` zn5re=IGSEL+N{2G5_C4p^V@y^?M)B{p*k(aj2sM$UowhPYk!3qD|KdwnNG0}MvXI9 zo8(6ggugjSQs?o0Az?+8mFSm}shIi8bCME~-kNmW3cMB>+Hs*WB6x>EouP1fo#!aw)izZzU2_21C_&-PAy?#)L@n`Gc=(#Gv=WZv^UNGjBPH%FyDX)ELV zr7w@WJ4=h#HemA)CyqRZkE+hOM7*&jopq@+$ei>B#B#1(ehhQa_Sc*bF=)&?{?_>o zC}W#smT~C50?kA89SBkWzS%ZYLJ`5Ldmq!4=+aLd)wWcC&$7~p3ZAOGNPN`RY>2X! 
zud`B^x}_gJ%vwNSdX=#`9UQD%vR-eVmCk_j@hiVm@D)Mz?)A^RR-U#$+!R@YzQkN7 z{?PTVrci)k3kMD+HmhfAUYI$E&DZg>4Hcjj5fcqRr1c_;0H{}*U1UK;=a literal 0 HcmV?d00001 diff --git a/augur/static/favicon/favicon_source.svg b/augur/static/favicon/favicon_source.svg new file mode 100644 index 0000000000..140acc758a --- /dev/null +++ b/augur/static/favicon/favicon_source.svg @@ -0,0 +1,78 @@ + + + + + + + + image/svg+xml + + + + + + + + A + + diff --git a/augur/static/favicon/site.webmanifest b/augur/static/favicon/site.webmanifest new file mode 100644 index 0000000000..45dc8a2065 --- /dev/null +++ b/augur/static/favicon/site.webmanifest @@ -0,0 +1 @@ +{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"} \ No newline at end of file diff --git a/augur/static/img/Chaoss_Logo.png b/augur/static/img/Chaoss_Logo.png new file mode 100644 index 0000000000000000000000000000000000000000..17da771dcf2d02607968c72f71ad47fa51323e7e GIT binary patch literal 19432 zcmaI7byOV9);Ag)g1b8e2{O1l3@(FvaCdit2X}W5?(QzZVQ?pSaJO)I&U?=J?ppVI z@BGodt7cp6`mL_&y{p5O6eN*96MX*k=@YWFl(_PzPmr1)bSXUS$8}PGg6HFg?;@e; zqGE66;%?|<`bosZ-pG_p+SbtARN2(f#M5!il>gHwC|XNZO&3jhIbLIXTPDMQc$hqF z9X_N#ec~7La40=%FC9G!dZC;E)H%JBXQDSW0<0nW}gxs2Y1& z8}pb@2nmw$d+>fJur+ltB=fMfv2*725TN*%UfvJ-pKfLfvVXC-SPM}6k5iiRN@Qa8 zPNrlWOq`6yEG#T!Ts%xH9NcWI91LWv02U5r01q<@fDypL%L3qK1(5ywNAY3J$;6CT zSseIpTOTa}3JVt(2VQ1ocXxLtcQz(_Cv#>N9v&WM04p;qE8_Drz8#`G#xLDfTk^Q65(8%7^MS$YN)BhQQt;2uS+ByF_OdkPb_Aqo{W?=&SGo=48 z%FF-%v)bDJm)qGz+4TST`+rsJtm^4t%B*baZ13u1{4sH6l>eA=;1zQ+HFU9eQnk0Y z`A-y;EbLwEoh|Gg$i&3{6*V$ic|&7MyMH?A{v{|$tVY$`1-K=Gl6$|EllbvL^q#FC71?Ec1sm%>PX9|7*Jc z)AfO&f5`uW_oMM&_?y~&VBYBi+-3au#GgJ%sY{ECsCuma(Su7{A6)Tk(X~H64D93N z#0#=4dD!%(x&4&CL?tW*p-gs*qwPm_OVucRlx8FsHWLZ4w%ppn_l?=RrK4vPtn0L_ z(@1HlvHGT;<9WGZ2`t37Jjkwr-M!+5h71rULxYE&BBvGm=l)LNa;@4h~a|51qs|6eMQ`hPVe{QouhUo(*ZrT#Bw{x1gq zYX64g#4Gm{x-d6x{1OxPk+c zH4t=Ka21 zyXa$D{}q)sF)|WK$@D%@pvcS#ZC41)F#bMmA(3HHH2(orU5zUMflw-Iyeqjq4MRI*sqb&cxp zavB_APif}I1Yr^6wS}4PE{|XIq=-a7_083I_jrWV-a(ZXsbB>4l`N*XKucXJ+YDB6jxrFP=f5zvt4R2+09<2aZa6yS~P|rGfCnoRmiiJ#Rjv zu_aNz)ZCZLY@RvV*e||*d!B3Uvb96n$jcV0lc(oqoU&B1YrWKy6#ONrYr5kk1T_a2+-X+A%mH($a0`3~xG$7ei3-7Gt}DI9zIy?g z_hAnMs*~*swXbMn*))+VSTC{^L)sA+i=2>jAMFblupruY zN37M}xRfWQ^LOnKe@C=xW{(SmL#Qu(IW`kc7i1{F!_UwZ3y^H9v_I@nEgGdJ& z`qU+@Swr0svhw)sEp+T8*aADr$kVEZzuOA~HiVv^rYn%(rxbTo$qD`IR`!EYYOG)% zMUX>?k@Sm9b@Yf}*+#d1 zasF6bBye@3hYW1W9M*5Mg@_rLZs^RhihJw2{p8xGY@B>^jzbbPGOMbUu7s@r`kU>i zt-YkT*0}E(PMQKh9~j9CT^))yzYPt!?6=N}RQ^W+uL%wc=9t%mR<0ouaeI0jvk|c{ z=|~8LXgB+lyow-T92?N}{tA1YdjsZkG|9(gsMhaMt4u%?NuCxu*ub9k`dqRk&iQkx z^&87icvlJ|RJvLggOlV!j+3{=uc&}B6ErtE(&(XCw;Gr5^IgIkw~6Q>K!+cm+uN?S z?(1Vuos)qcg@XEL4}o}+V#LXcY+)n{37rZK*hXVfE<2_RJZUA@pN8z)4l|rzfvKn z41=CM3zPjoUAnv59u=nvgYX@kuf|7&qDA9?$Wfewr;rY5|w1)$;F4Tp99!LlY(&dCPAS z<_%(>J@}VY;KCVS6>3oe%U<{^esI3%Y@<>N2XHwe-7bvy8obuJ-aLI$3&8lJE4a!n zGI%rTLkB9Eb@!(Z;=66e%vIs7r&}t;?`el4E5eX=^uE<{_ z(-I>i!Ru7uC>j>X*TPX~s_=NJ5a|}7W)hJ}iN^zYNRqpjld#QOrHG4Tq&P4^9jtm`u zQz0_Irpq_G3~(f&bYhfCPmX318%b<)iXlPe z6k}XY^F^?O|2Rd8yVB!jHqZM(&3{bld{!8ZM$Q{C*1ij-eOv~#ISGGJ0WOs}Ly8Pe z3%04&PEBiVRwMRF4>zbyAPELP4~jzR%B->u*;^^xP2B>a;9>Tb9ZWDq^YID<@>Ngz zgeV$tnZPQI5Uc2B+6k4j8;haOcZ(8lu&0JAQ5S!vu6zM!7VDp!dzIyfT`VV5eo+m` zEw8tZ``ofzpxvuySr`z#`^x~@ z>pbxo27BC}feXQ{P9lb@{4yCbx<`9nC)smX>r?oBym)5kUt2ClTp;HSVB=pzv6)nD zwc>I`td!_135=77iT)bg!~?Xq?d4oN;tOeZ_fIb4*&I-7rgEWAnQKX80zwBGHN1Bu 
z-(pAVI-6nF{P-|a1)q@qIt=#Vt!#Y9Dv_enw6DJAJCWE@${b&M<2BcXFb)hO~DJ9dOa>CX8Gfxm~*))EZfPc)fE!%AJf_r&IOgmoFZ&^WMlgPTb z^hgjFD84^Fmcrnr65BAz6Ao5>$_9I!@70opU0}^82>g)u^ns zZst|pDd19ji;>HqhcbiQl9tcOhI)+b$A#9~r3hQw3v|h$(Bc`W#jne=pe z$&Bk_oBW0~_6T~s*981XiEzrg=~v}%8^W5=b-te@|Xh3QO{f`wDP5BtIs5C z(6e1yPr@T=Z=2_VhrYW~p zqMaQNW$4()kZ!&yx$8PVJ-~cHUV{NDnc~KYClH|#X9D1AjTWJy2{rM(iXQV z#NHqtm5Gf|^L432fK}B(R%1}I?&d0fsH*2zG*TrC;2;mu`SPQi#mTl{#B$M52vaD8 ztIz#`5dbABopqm6AXOa8EbAAOX?W9>$uutuKYUst3^&O3wL!47{CXh%IUNq{Omv9R zCKHNt=Qjfpkz=i}42o8{=w>=HaFjsU#y2+iCap%- zdHyo4_iZWIKvyL+*5oj$oqGEd%Iv91oc8TxyY+Ra;-^?l5^wpEo@bw=R z+moI|4+>jV^VUzX4;)G&Qsi0Qlr5JZc77t+gmRBs1Jgz15MERyj`*Z8O{CHrJ=R~s zh@@;wFHXjNw(cjsGgg*kz)yk)WDFpWBM{EdV{5%!_2b7DHWfe$^r4-1@7YCZ)nomF zU7%Jz^@!GOatmRpWewa|(#n$Bm1FK_Pl(*&b2+t+m#`|CIxve`c3~a9s7s7#98xIQ z%v5_rLG^pT&C9c0+~Zv3mJMQ98N(4tXh)Q*(X0trt;RHj#ao2oa84;a;FGTf(u@8U z?>Mt^EEMRK6kJ9+E|=wF3@uD6`aHC+Au`YaB>R3dd^UD@n&nG2%M|QXNK!O8^o-J+ zs%pA;sK)KN3Hf@q*bzm8NcE>-u4rzIr|m?U9Isi^S%QK}OhXZ%H|SQa21aq)vDB1n zCIRqbn}1qLsXW_DZ$8Dy%ne&om|G^2N>y7kky$~20Q&A=(Xw~eY!qfxzd_$FQpa;ema>3`>isv7mWSWv5J61=y2- z`(!mvfhSaiUA2KPKWl+>W)>Q)W6+lZz=_Ns#A{Is-~YazNfDk}3IiaSMrA9WR;4G4vP|9)o zw-1a=S{EG95s9P8U;I8;-0aC(N`Dfagjgn0pMrw--BC8xxRCUqu(+J!f*Az(9vlH6 zndUxmDJxxCDrIsF0WRWyq}&;+@Hj0gz7X4JopNavGGj4r9_%&k#Ae)85-oew&O0Kt zt&HrIoh}$kC!av~d40M^qD*(U7;%YA6;=}8mVURGZ14H*cNB8Ay(RuzcjWJh@*4l^ z88n-Np>}W~s8Kzsu&WY@9Q7wW9b?oGql&hdg?*vIo!$5L9=&NY6fN$}?Cxfg0dF50W{cmhk)qX5_uW%D;CcI3vnGG}i5^N~1PpKYK7%Zfhd$vl~{ z`CS}jK<`9q=^plwfuBpXSrqq%aCpz%aoQ}VjV1%-2K~8XoXI|z1x87e==zdi~kgy?=V}t=S&FpZ1!WT82IVMyIQO|;}Dv5n@K4>+b za%Rnx&NN%0vse>iVwyp|Py7o)vVwN@0VJN{7s(|S&^Wh#`v>Ij7Z5ZCo>X}u1Ge%r zulJbqh@akj{ol13s*Lq080@~&oFnR5q_zQbX|LVmo9p0OXPAmL>7c&dySNiVTXPSe zw?-#7ixNDbVvW8Y#cjz2tI|w}$_GT1Do{*BdB(XS;OreFSu7Tdto)ObeXHY}$jjxO z3pFVtJz7~SnkCuTnz|cH%Z3o>h+W?Y zVJOml=LoXYXPVsg`Z{}iXnh-W9OuUH{mKUDCS5%p0F!Lh3>_J-uQ=!Por^=WMOX=G zrm4#d=U9yOGU*O?ve|MC>;5t=`^>@HpKV-~4|xz0Le`^X8eiK zJ%t?oY4?KsgiQ+y2p4xP%iZ@U;RQ0aWa=(F?TzTopT-Qa&6do3>)_vQ+fk0E$0z;E z#Ld!0Q_@hYSgzLV571#-xjuo)l>^|NydHEuOfj~msT82+xRDMLC!a` zNqRiQ7zjPSm$#fb`f_OehxvLa5GQmb@T}CzErX7qYBO25mRBJxr#vU~mkZ(&h`J{8 z(^D!5Fbin_c+`CM4@0V1Wz z4qhlE=$U38n8kG~wqTig?LsBd3e3`|2NQ;}IY=!r60UI)K+sur#pkp281efy#4Ye2 zDC*uj7}lJ5g9E#)8GHVv0DPLuVjF_-gVQz4vidRe;R}<01+mJ*HL++bZK!F zN(D+F;*Q%8EQAcH|^);o*PagkK^caQUY zQjTJX?gC$yNr|Fs|vX{Mu)ncTU~fTfz4+L_wEiTb}f$D ziU`Af^s(ZAye`T^mO80&tFIKA&cb}{-g+GmO(@MxJjzrj=T>&o%ZBoTyLoc9%ks(! z%ECIuP-JA)javS`O`BP&f1oUr4bJjc{_0&fI|B*;(RSSdr(ee#dN%5K*Y3}tX#-N zWEDxg$@+8yjdprAq`hXFT^6mOS{F16u>C>8KAuMLEwkHsd(9xH+uN`liHm75pi!pQ zWg!TQ<1Zm(fPq>ioNw|=H0SxRR;^s?#jfRZF+Q~TG3mSQ9$PeUBUv5GBXaZ0@N#K+ z#f-XsP9n5Nk|xYY-tR6An-*CDb2FG}*A8xNl>^3D{HA|M)A@7~;FK8EzlpgM#hyS? zE|=-l1`5T#A{eTr{zYsNcDhajY{Pz5aDtJqJu2tcFSI29yNP@}+CS^9eI>}7YV@?% zqr(w+Sgdp23+Q8APO7lk-6nb+s2ZcLvSj}Oj<@d#VgJND)*`t645(W(#b4+7Gofg0LO`FzHj@oHHnPG=JG7$S{3S=CE){XacQu22*9v! 
zk3M6)+tuv2?E^a(%MZ}jvV^6k0m7HY3t~bs^I(+qT-Zuyj?>j^+N{kkG{fc9M=o~@ z6gFu zB=+*d2vXppNhb*=HZ@Jt6GB>>d_?K-+_h|60vAj^X3tYun@ZAsL9ynT_>Xedq&A`bCO6QGeCRXsH13Tb3EFHIB8U{@_=^HhqgjVnY0KV4Fy7 zf!8&**Oz3z+t-={qk7AA5`?fi6Jg?*+uU|2Q_|W?U_@jfZCIg6P^(k*g3$?ec9)}C-{NJbCoZA&E0B%6$-ti~GkuCRmup z1o7aZ*-)gB5#2>K(SRo;)`??>N-N2vNmXrks^>4Vf7^pdl$8}wMGZTU#a9}~TA7jC z(Bdmf20!XPHK@@&%vrh`9*FY3n%_LP$|FG;4?UaVG^beEDDje?jg8=hm}GT^!d5Rp zESZK96CjJIw%=rrg7PJ*Wn_l#R@wBErdj;YtD8Ji3Qdy(O7_(^(R<|p#2=5H?}wI) z9UCIjBBXX9`UScKpK>dm2OipS_s*(A@GurI#G~Dh+Kzc1rI~25sUojxyK6{88}zdx z2>;%bt->zkHx=kcnt+_ZAL(6jTU<`UjqjV9pElZ(b??Gd98tJr`9k~1v~d`v@j6gR zgr;P?t0GMH`&C2SlG)+?724DGVAfY#zkr*YvJT`B^ZWnEi7 zw{(%}z@tA1hmpGIHD&H=Q>(MM6pXe%8#k+~#ec?T``)|-#Oel~GcdN#g zeq-yoX^KhCJ9QRC;O37{H++)G9f9~xSbmV>rZe+Se&)HjvnjQ91N@EwMumYW2j1v= zhrH!BMj=B;nID)=b|EKz_Byw`|2#!K+iy-fm^|#-kaWUw711mre!%V%)Si(TenCi;>Dg*0COHHo#)q#y+}Laj4O!(kQ5TO&IWK*&-|;a>Lh0nY zkxN=~wt5?-8<~r3)fGuzM5MW>Ly*M8I2df@P$ftN_}I`HvinYwE|waWbwbGwxe-^^ zxbL!2zKwxMT?;v!K^r`opBua~0hDt;$n(c-%9HzfbYfv^I`MoHDzyeq9j6dg9OpMs z@u(`arPS?&Ek_)ID-fHbyrQE+Dq+atjCisC_fegDYgr$FU103^9EEnHwKZbv*6+Y#m$ayce0LL@L?bUf3 zcpG1t9$~WK5i3%EVpZp`{pohtC>$n(Sl{f`x(uX_S3JF;unoT%jIZWHlXk+(oYpd9 zcEg*-GW_%TS|!1RAd9r(2M)E9Ey)pPICwvzbrbSXwA)1d%WS&BkI*!qh8&4>BoT!^ z_)*?wv0vLpth-o`g{vm@LsM2_wKiYxRYw?JJMCnQ@<*XS@Lp%s$IN5A#wVx4DGTBL zXI+ThL@bbq9o~Qmys|FHc<~V(wEoiJpG~2A@WIAMbgU)VY{TX46u}M>ZS(Ry`JbtY zOM9aR8r^b8Qby+#It8hv#io)pj8ODQ=aj8_N!{)H z6f#`TE`aWy?bQ}fxNl7!HZ*bH4RML&_8gwxkvZq_HAZr2<`0$b4Rq#JAImVj_ zz~W4%5>rxgDHjQ8-*`?pdR^5j!2||;SMx6Tlr{e)lwY#C02G|DHp5m<5?x9XdVV)d z-f03u0F@7h4d__-{I{Zrn~_DPzw3wUp>Eo36TL=Xk{NLUeE$qTGF0&pU-`hbuV5BX zrpXQzV#tP~KAps@!^X?oX`dt;<&>C+Y+f&^nLm4g2H)uuk*C?H722<+G})YKwqbp< zY=6GmZpiPKG1>AicSyu8*@4`g=!`yLP!nH~F!Xh%`vynT=Pu+&r2wx;b%X-@KI!d{ zRss95c$&Wd(`A-OBVNWwV!N z2vw5h(|*{1h%P0H5VmIy9G(`f_JKHQ#-Ge0`GILr{>Ne@&APCEenI=(ea*at-AEl_ zwKG~}dg%8`JkdkY%@fn@1fROt7wr6)f)4#UYw6TIJ-{%AL<3we(gY{oRC_;PbhpX$ ziFWIgXdc86M@(?HzO8!zd?>1`drx zG~gd2{#*X}hFpcZBbq`;ji>e9%ZeXW)#MjPpaipMd2)T7&h}>!$1RQP4NnqZV3=Q@ zjup!ls}!qCl(>b#Q>bK40Pwyi@YpsHx%F|@nbkP>VSiS^2S+QW>+5jdESV<0g@ia` zyF%;aA=kGRf7@#9Sw0_Hb&?iRNP#YR7x=dNKuw0w)pBiScSoiPJoTlrhveS|8WPRx zNa(C0Riq)J_0Ne*I%@dkC@XNZ_4{VQ_xW#9r)zfTl}~_TP09wF)Eo%KUruBWc!seL zv3;IXbCup*VL3tc{qI5gX$W3>ulP9PFAd(AmE98}p;2j*9zWYL9LiJEDX;5Ya3zdE3u;R{XVWDwrZ#EIPugmD ze-bPmEoQnxeo;lf*psiWT*tZQ(p#6;QLU^F6~BOL#X0uP0F>{1-G(5qyDHeeRjJnR zz0o&4BQKIK5X9JDP+8t@mmAiLMwOe5rw+|;pLJOE9);JCi1c|N2$N7moGbY>psg{R z9DvS$4+w4xO?D_;Z0ZmQrnu+Gw29$4nx6HXB!Vs|i7d>4Ujo(iwPSKDnC;#V<^70C zc}}+b>Mlg4B;F})z^4wg4a6#4tZ>OIqa{>4oXT3KCsY&xT-@#OCvHM$ zzK&TAOT9*-PV4>y`h^8`c-S+AmBwOhSr7-e(UBDY%W%)-WLC|apxH-#8@ZTl*FD0A zC%no4NY9J|0OJ;)5e~R;rtLVqBnb+Zsmy-vdsZ&gBY3v^@=}%eLzoXSoA#)SL?3B- zmZzx?iOQof8BsF0T$R5vYPI-+$ftT8hD#N?+@+7-rKSM~=i805_@X{k{&0$DZCklU zW(KAdUr%wdDFXJ6*z{Xf+#(VQ!I4})QC$+wv|FvMOuT>KE+hv8SH;yUepVCI7N%KN zWu?JT$-YnqaRJOtNNx=bBbI5_Ruv2`~__G-mF7i)L5?=_Hpw`6HO{bUXQK)A!Cu%@s!YP0J5iT`Tc zCX|Y+R9Lnvkwt2N89v;k)1doT=u;IA5z6{MNL@5c!NN+91_zH_?58KtI08V%e!@os zIej_z*-X92OU)NKBDFTA0|}jZe4j~`Aq3{$Hz$@)R2uA7_Wb*R#`C7^wrx&!a+1&3 zNuE4}DysNDP!JQ%t|n2grvRkvZ&#U%GV!m)(pn!D{}z`j!V z7I-L{1k<06L&=nK!MDIng5&NGs|ABmY_cNiV#kjzFSN4M*k)xzN9Ck;P6jIWaViFPM*H5o@`zohETv@*yWg<$|Co!$?FvfA7}Y^fu7T3V7{AOwyEH3YxKx> zMmdnwQ6d?oReFyN@^u~&q>cvwGA_|<*1W76i5d|_zp7Kr5YshsYamT;tKMFsCcd$f z#2OFA#e4XiY+bEq;urpPsL3CBfdhcW7%uu$1xSwPSq{2?#i1~-*c@U+^Co%uR(A8q z*jms$hZ5}F`0(cVkd!x_5nR8mT7DE^{9}QVDB}0!5&^x6+hYon3|EY-Kc^FXDwduc zd{{9=VF}*2>yhsA$K~8KFL;)j?b`Qaglrjdr2H}Q*)YB`@!-!I2BGP%DE~ON_d5W@ zYa&zcV6eAv?DUzloriUP?{gga@AETd@OEZv((b5U*DGA~3XnW!HN?B&Lb?MJe# 
zrw3N_ysWH!$7mz8U7$J6{zKJgB>rGYKumYF60Sow3ad7gI%n3g%~n0BEl5NeF;7$6 z@jdBeyaaU?=VFX%$Vhy@809XK2O-w|gnTdGfC5SExM1ighaCm^kR-^!88J}4Dx?VK zPtO>ysXw+zgw)DiP}trRS8iYBlO-`yy8ekc;zPc`Wtkv}bEvC_NP``BaAASY=L}7mntx$V!^#Xc< zz5@4|NR<0u=xT6dYOVSBR}N^=Bcy@DiKr5l^^gHliuqsc1WBigee=B>&aDVl0KwH# zxFjA2IWDgFsy7;1>!+}}7ZONCWrPk(5dtpAa`wQAyc5TdQ{#yU#jcYm4EyO52bB33 zouER>3G;iMl}H$v_SvItyR^_$t+PEr5}H-Ms`zurxwx|I8vwgzytMpdM$lV`Vc&CQ1C?bBr~sJ(}tRKjIh*k}KN^Z6F*LKk`x$(3u*qh%*nZIpGG;)X-Q z;ED<*Y7OX(jqHiJEAjeoDEI*A8sAGN(THSN-mR@rLDF~gyhDVMy743+aAeJ)b9yB4 z>dxi*%MIb^(O7m>+5^sHSl;^k7kKCpD7|BBt|SMS!1F zg2^MKG~~xJ&JrwUh(xS;G0dil+snURIH)-xl1{2HX%93^5j z_*cXYmDfywaoy3bQ=ttiSCC12|=Id45XH`Fk48a z8?hfPdo{a6YGNf8W`Y$s0EVRgvrP5ND_xYMa^~pQ%twXuI^Mn#t;7bQdn%$A=2&eK zZU-7ywOK)AHoy(3Jv7_R_{u@0fNSQPFRxo+965V`geJ|o%^Omo7{_suCY8kl?+od% zn=x4x8t&-bt6{HJ#u~RKo3v+U4$`p%$qi>`bjm?bNAM+QWa_B zpA-y?bP~3%dL^7Gmg_A_HUXKs77ZGSwK)C1-n)R=QE;8LX66=0KTkKMrcuA-b*kK3 z1)w@|$`zj3ut0eYCshP;qTcQnQ2hw-NyloQ)}+?R33Z7t3^+U>aP==61<713=v95& zT{s!TLt_xrv7*C)lKtC1%=U_2)|m-)AT?J~ZXJZ6>321Tpp32G%AU`}0AG>#2Nf2R z*p97O-p;-emqV>-Sjn9K{WK0K;T0{y5r<8ThVIrwf}OzU?;qaZtcC5msRq8#^M#l2 zlh5uyRrc6DOJM{d_0n81Yd4P^z6nZVKlS(ee6>$=g2jiPBQQL`sSo5a#@b* zvay5ivCs+cJTty2-rEGk_F9ZXaVUm9;4W)AJj|=9=_gs%`Bym$dhM#Bmtc?UrR@8r z4ye|ZkZC|bukoS%ITGKa3o}?5q9K%=QDRB_YR9YCI%BsAza3-+w{wSDQc(D{u&g>p zJ8bT7(npZ__FhM$Q2z^I#YLX^QnNKg=P7UbUX-#DUj-~z^skRa=Ej#~WnT&d{yO}l z4DdF2U^rOOs@_1-1$dBl?_1x*>D=go9}u$?V`7QFFvhufqMI;?v@fC12)M5fE;ZOI z3(Mb+(?kCgL$imp~mdA@8Ols+m4G*m(G^qSpXUablNQ!dxv<}cu zj;Z^bfaG7gH%b)k_Hgs$Q4J-4?NM^on0@}8CO)0>BsRb@mIDnkprx+#KvF7s;S7fN zi|XvFSi?E>@!!$T;Fe~;h&kcWWtVxGx=Nz!9QJWeI%*PxF*OZQFR;fdy2YC7*CYl_ zGcQCOnzj?dhG<=jhuy|Xra|cG$+Q-W>f~k%#K#aIX&R|>QS9LXAexs&Qzi0bS^Z{h zbzx+0>_LQPT^z`IGI-(`HTle*;tTc8wtx-a39lDqwMx&#u(>;uQ5{hl*?~=;im0l$pYJ!Dm48?gHS_S{h4N4f85nKXnfU} zWNfuGj&~h?zKwdJQ5}M}yp8P|A6_n1RFVo%Y*?fO&f%!z?jOgaBGQ82xAVGam8iB1 z^}c!I+faA@0ln7S2Q3 z*_Oks@nTx_g|Z@Zj+VT$b8A}n@#kC@<=r?g`{;s9@G9?&!#&7i%hozL z6^EnE=m>IEm-vtJOHxEv`f-5;dqn#Q1VVx-IF50<Invul(Z@NWzEuV;*)FLNzb~BxUEAwfF`aPoH@i+q4^r^a!hiOnU^7 zvojS~!GU#+ZK#%i-b#31-`aW)qM7b5@J%BkAMzfmin*mfjv)K<5_iq!=t?Wtsfqvk zMTU?^pwBfqojA&*0QTiPd^Q*KxOj(|_f~>ipJpuPcV3k+Be+ExPB5fNyVr|q{ew$~ z28HKWKDrPJEK@O!pwP=ok|(=F{F!k~~ zOnQ}S@nq+&0`(I&rqjr*v%RGq=!0%v`C?JT%YEe4TAv9(tc=#ZeoN!1`K zfRmeARnq3qE;$~fg%2C{dflj$Z@F=W=Fi!a&#JiDM|GYVMfZ_swt}fKzPF~FFMpL# zHfwBHl;hR-%Kb6@jzvBmy9GO0 z#uz<_9HvwnQFtl$zj2CQW>$MIAQ7MW}eoqolzoz*Pb6`RMSY{Jc#N( z=Cwb}P>@^Yo$@l2nC2?EWD#Wle8-sc?`O{(5ni^Y#+-*_dN`}j#Pa>yvTzYSK=Nt<6Osn5`+TH|`tvs| znAv9GJsPBKb04rnn8tP|2bUuWcXzT@L^f!r8I!uOx{VMh@rrT2Tu@n~K#*IO>geA} zIB$a7v?Ew_K8Czd4e^(x-;f`9V+M&mCrf2;&#eWN!ErKsdEoICcN6>!Q{w(nadU_k zsW&Pc!v+UZX_&u56R+7BeSGBvx3|-YEMVV$175v^JED$pg#t&=`j!8%uZG{gk@1Sx zGyiH0*n6qDJb7V0-h3eBE=)P0k<6|t83|D*F>pJTIKPxg>LQz?Uz_%2E=wS<7U~stx z>c&j0E=QI(5-(n;p(T)pzWfEhaW2NhvsT;R!bb6K{N6Jos0q98Nq2Inou`v)X?(zy zo^{lM8w&_9u+}M1S&&2#_|<&Ud)dZ2b91O2HeyZViW;-F@4)mxDQ~$-WN{c^Q_P7E zP1*9Z>)DUG(P6bA$C_(0k+!YSyg}te+6y=}-K@+lNlJ{X01M^9285+KF?eNG6>s64 zod`*%_->QMqJozcbLCVJ@f%5r`OZLMd>GG^U#_k_(;}$1;<4DQ3pFpby>P}Mh*vY& zR4aDyJ7_R>)Wxtuq~5RLns}Yd1+|j>hdL^?R^x;!OZ9F(nYyk=AYGewC%=lbdGCr( zJlIO)Z@L$a1*!OeCaie=`&N8V} z>LD-76%j67#rL@N5Bi8_m)?2fR?e>9*U->(KOOa zFA)};{vRE*`J*^YQHOdF7G}{7C_;j)OBZUA&giJ+a$TMJO|A*i%b9t4 zTcD2wN$Sqa1<;%Hwm{*VS%?98Y@>gWNWRlLs|Ae@c>cT6I+ccAB2C`YkFFUG%Peds zSAn_W`MOv})qE~?i=B%^xV`6H_1-?Qmfo0!Koq(~;h@1|OmZgV^)R z5hwd-Ygf@v--}uR8Ea{K6@HQGAQt7WkiC*|y8bRkMa^-0kM7G*l(Qmi_ z{bbb5S4HFfqB1QVr?DtMThM&Gc~p>5Hutz)GXfbihI}NzNd?^qJ?#9vGssP)pA-#l z-W5^a$jJQGj`4^HQw#KF2ui)v$Y_a_!~LK*8a;g`DItv!zQ&X0!5!DM{eI5P^qV>i 
zTo{Q;hGT~>#yeQ0{gLJ)`u*UjEku9$cUzT@aB6`Dp;T#6O4TaHi~L|7Qg{JNEcxPH zBA?E`mn5)rjW}8prHNg+hmzU4#sTdf&&1w*39?G+T{E)=Ge0FB8l2&MJ%+qa;xFg{ zv>a|z+NoAUsjR2_%DSyZ>Bg5sjRe=d`#W8XEHO&C3^P&9jFLrng^s!RG3RA?BD(Ta zhLG|x;UDNlvox{#O0%N&`6(y+D@7(v(ldTxZ`rNy&Fd#6w%J_SAjUt#L4ke-kkYimHfgi?Mw^P7v?USO&#MJd*AtmPlNWe;4_F^p` z&EUe$MyuGivw?EabtB-UISViZIYva#$a#|om9F|y>+QVml{WtxJ?&01 zJgFaFEUh}^>3dJVqNwFy!Rpjz(?a*}Qz-!=Ck_NG$g%uHpVif)G?o+Ct|Bz~pa!9U zrVGN`jj&89Qfmm_RDYX~Ni|8Vww}AiBj`M*-9N;Y4Ua?GHwy8^*qN{f_;H}vrYK2? ziE3+09%ub3PwPR(lW+wa_P4R+(_uw4oAGW^Hr`q%G(gnY~|>;=5kH= z--*JyJ?r+xszY83PlVv>Iumq;v6}H8w#YgqmlMBo6x>u@3}Rb7t58o(9S>z+R?Jje zvM&pI@pdr<2a5MisV}{|>=;ar=s22x&e(z)-t-dVENn3P-K&V9W?$2kIQP|;F&w_< za6os;@1E&z_FXFrk$t?(@iaC*0)Tr+Yygq=%Yum-bpVQ$nc#BI*Oz>lyM<_=YIHTH zVkO59@_oZ__2K%ev#KLz@XiWvveKjG4Q+tM?qnt{`|ufZ`qCTrTB=G%!Tz0C9SIvo z!<+YGcz9S`-N_hMIkevrg%XkF>?9n}P4Xy=TrvO+ZXRd0u1U^m=il+(7J6lM4HEmeRROG6j`D`ZaX`if#yA zz$KCR^%@0Ih0#JU1Xr56^e)pY@qo7}>HJ>~iq^lHH#1B7oLC0!-#u_RxBOX?<=GWw zxnL_o&qkOnf{!K6_}Tu_qC-PokVaf;J0-sS>|O0BpHCqMJ8kL${D6C2-N_xc6KlQ` zv4rTPw~bHP@e()oo#s)O84-JbDFZFY4+-tQKI)gi-lStKx5c+TmcHcoOF8ikr#8b( zhAIs?n!_&j4@ZqD&C%7;=e0tF(HQ|gcEUeYYvxjK6y}ykq7-ybjbzgJILMJiEu4u( zbzSrk3*Cn48FvGcJAB$%3L4JK%QHea&{U% zrs}^sW7_rN#uc2UZMQt3S>d=PfXQ8Rb2#s1J^3s(_V zV!$OmbzsnAZG8V;k*l!_dN)4qgwud!x%jltJDb%gNwo~OnL6$X0BHtQbiA1@CU;qv z$jas3?E3&-k14sheoB1dmIjRKJb@xiw3W=A9?Qm9)2+>~jX$H7RY1e8mr8sVGHTXK zi@b?Jl;n^+x-r^O{yrA_&{B-vLv1X@H@+Vm5CpbP+Z{ioqa=$ZPEO18@N#}#+Vv>H z+DzR&ZXFcif1!9KG#|Y@?g_sksx3Pec7>_|z!F4Y(}N19U@j03t%tkK`UlC=i-afk zIXQH=nv7gUoc)o3dpGO1`=vi|>?T#$>5zj_^JTTDEBB8SeasWx2t1jsd_D4`p|>!2 zX|}a?EWZm-F2?fT!Kz5>HZjI|^(uN<@iFa4z(sgE{8(DVEQ-zjG@36#B3~MNr8dtQPJhMuE8%Eox+waD7c5hHL~$Fc@=HUc=j*73Y&`9SQT>+>E3z{4`VrTwv_Vu5WJNn#ktZ3~@3s zwb9~EO}x=h>Lu&Y8LADOYvM5SYMwb%e0P{0Z5``-`|eDlU`yD`Xk#ppCOL=^`VaH+ z852mLpQo@)02C+eIqNdZu2A#f9K~Q$zF&&Vq-s3%?_@?e-SgQrc{-dQv`^^2p@$m#fz>Yms-A=!aZTGQxkmdmtQ8INcyG4d_ROfA1aS1 zP}Uy43+sYLX|$9KosGEFwNF+UzbcDBu_Er=9>GN0~LsO6%QJyTDPqL(wc#hzaQ*5+2Se~LC@$_U7}KmH?{NP-3T1eXvT z?#x>2?7hx=`<#1EYw!Met!=C^$Luo3H@b~JXT@u4su1GQ;h~_Q5UM_c=%S#Y83F&7 z;$Q)PKgxn6QBZKRgY}Jkb*%$fJiR^aom^oozCoTa7FeK@Jqk+Tx2jBIZ#HQnrMuxJsw;4bOFpqJsnQeUtorwnz6 z#jt$)i{GOv-Idv^vuZchQp&Y}3*V*mD1LmabK<`cy`3T$VnFHa^{0+PT z^hbvq)DOdHwZ?DOt_FUT>s+5JQ7*|dZr#c`O#1xV^OdDut)6Gw7W&nzAm-)S3 z-r$qj@e{2c5hKswm%mwll^;WsDi2<-RUqcr-I42s^fLtL?+a=&z-&dGT)ixqM*U;R zMFA}OGlbnU$a;^;)tY{}O@|BX`&+=v1#68Stj~?!?2ktHH2aifahj1(dkXh8Ff#o?ianDTgFiUqjQ1I#+ z^VA;&c4a)j_i|ZOnGjXlfZ^+eObG)v6As8L-|j=as~4Ti`}ULx&yP>Z)ZSay;;KC2 zUf1haWOUfz?MvQ#OyH-Qs#Jt{?Dx4SL*F|^H-QShxmxvI0u{9C+xuyKpEMKXl!@sc zUjqi#V;&7tx2jVnbBDq)lQ$P{Qb<+kf`&L1RT$lVs-Az4fu)zf8xy-q;Hhe}L#3#w zZnz|Je?lhhF$>$NZruw<)W6V_^B$Sd(~`dVQQvAPZ+_R~va(+v7&Nl^zL1k{_11=)u`7e;*|HpDqj zYp$v8)+HUPuOb4+R$9ixuXhD z&T=z!bkoT`&Ji{C>y=c4K4TY<5R*-w^$>qWh$_$b#u%w0NhJNNsRXU<0gvZo`^fv0 zJy?OlNN}Wdzf*9!)`DC6)HCV1h3$!Mz53japRTE=`cY2PwIb$f*iW-FTxGRlh5`;6 z%74}?X!%L>YUCe$eMleRK60&^7N8_IvuRCtrZ*>}z83EMI?idnRiSkJJ%6M-7eT{z zX@kok17B6g)J91Hepmx0QGw7f>S>u%oN>rdFB?FNQ>Ze5BR`r&l3%y5uPe+00qir&8e!20ZVPsoBpnaX^#&~UN& ztgm%z!Q*7Hv*N^_;B1yIZU`fq)O?r83a6f!raM!hbE0@sav;6Hr)Gv*Q{xK5VV1i` z5*&%htJt`9N@mzm2^>6)qaAQ|*&4sQ3Hy+z$5*w(P3hkX9re;P)MY3Bl=xxEK-M(w z%*3t7y`u0ZauRH^C4SPgMksQ?3jhiNa^b@z49}{2@jcv1HvV z>s%s}qhQVEls`qZ`Nr5Jl2(^8_|1xY@tagG+m`W95^9zPG3Z!DmVe&o>=Sp5R;{sXt+82V{%JpI3Fi zEkIGDxO`(e9!ao6szw75J8bIKS^fUHhSkQK>o_C8fmmesfhOj!hq_X9E^!|=zmjOv zuzXF=dZ5gxYOgK8+$Rp6xQc%9Lf9SlCh<8JS3RMk9_@T%5#%f5n!83GS2JX&cL}U|DNZRK z6cm1Ov$|g#P znCW8%QiYU%G)$YZk2;Rbs(*O733DiYD3~TAX}J3`>J{Y6LOho zLo_?K-vtGZ8*~*y@UXSl=PmK3)W!}#18`!ew`xrIuibmMSluUPE-}^pOq|;ke>{=g 
z$is4D5F;?ziK5WQUw=cIrutC#Bs z?pHHNc>&{xvE%-P9~rPWzLweZuW+j9xkK&Dr<@G1K3|!g@rif!Bp$EdV!yB#u>MR? z;evvKn(U;gsI97~_#br|P@QFmrL?Q{YE$%U>GgvsF)@{-Fe-^`9#Zsm_j6O_E0j@I zN@cc%O!#FM`8m#*7Nv@_SkONc+@EZ#vG%j`p+ZLk z9We;2zixUYQ|CmVos0-E(itPOBuUQneXwd+&BGMo4Q}F%mU#W+D}h(E5%1SJ-N8yn z=RUo@iOP}}owfU3=_g_i8hpZc+1Ifv*&Xj6T?tg-kZISU5@PYLvnV{w>VVE5CaPc9 zuqv5*jWpD0b?&fDWUmGbzRFUHmw`>AUi01ByKW2)%9IL7Hg-si4YGV{z}M$_cH*-u zF-gvHPGGiEe?hh(aiJ7kT%tq_*KFC^(9qEP6T`7eRL>OpnT!h2+Bgh zgDYf4?j3V!@`GUfM~i4z*j?K2w&_>%((7B|$(WDsex2jqqV*5^7IrqyouQzRkUIe- zw2_9ol&yyw*xJs+1_lmv^8`w46cicxKu>F17nm=L4a~vGT^4lE+6iKDvXccFifRaG zcq+miogM{y!}NkR^=*S)Y$feL@^W}Gfl`0~H<+(AOQ4&pyN^_$Ea(rpQo#SehxtJ) ze~|dP$byVCv{@8AykRUNU=gqYpHiTczYs_ck447Y&R$9vqWqT#;7S(c=hY`;bPDFXuYvGsQH^mX!ZXZbDD z+Q!4rR~7^U?z8+Q9Jp(%6bKCdrQrAVAL4y{?f6xJ3lU&F00F;{fPe&_fDoUMB>&&} zfx8+S|KN7_`3pq=PyRq_PkuqL0Kc2tf8y}*Rr3F*zJJN#qYs>9_;q1E9)8}oFeQJO zyD!_{mHPPV!v2ZL|A7dA{67isboKN3`=tGRV84g{Sh%a5JwLFjKc@b7r7Ef#+W+AB zy$=pfZk~T|{HFdprJe0R=sf+rUH_o5v*m}m!rXwB_yCdx{}VmH{||+Q5YV5cL~OsfXIC zvLGR_z&}T{U9ElXJ-ppyLF!KKeu4kIq3`4d)AP0d4V0jmpn!z1gpiQ1khrk0sOUe5 z3}D_qfT{nMDJTFIl=$P0ovoBIAki8yIVU%32N=JnyTczh0Q963y=88dB;Ww${I0=nrC;-EZ&u%fA$?9r*val;QvHi2q-b^c+0`-2czy z{FC$_QXYEy26%Y8XnAYdIKymx|9d(AiufOrbb-j`9fjQr5P=QIrMwSo_25{@4Nl$3Jh`I$FCsz<}8LmtXwH zxYNHdS78a5pty*=7@v(03@}$gF%do+VVESJxHVwzlJ}SbM`B zIslFX?6WK2xIQoz0k9AY=O1>)^`B=G;0Obz$tNHtB_II$bAwrA_-slb|04wcN5ucOuK&;A!u#h*ALb5}paH-+o=O~V1n2=^ z*{G>NQ0{(z=e3u<07h^<9~t|gpx~4L{zXN3l}!T-V*9FUC}D4)l2D`b1kFDjLP23c zQH4Cz5B#?GCLk25^sB2A`=O3b9gEFw)W&RWEvcb78~4m~LL%L${X|(sb=Y)$HDhaq zkxk-v8{2*+-flWuLlOTzvO=_930)UAg1sK7Pl7!5+xgEr0y=WFAHa~pe zvZ9H#{{PQU3TYn%n6VDSrP+cIb1g$HT`gLzNbSozW4cVDpD5{=a@ZXhEmwDUJ$C^@ zhp2}{0_ARj?y;&<0_xpD1ihRbF-Wu-1|$3#5~DdDy#?E{a5@3ZK#=JE z1Z=DQ{8|v9PGsChIru2Db17Tuq*=~*2EkI!;kOPFEhJ-(zAbmH>^H3uxS?3SnBO7~ z;N@KzAe+JMSnCgJ%_hWwi{p&OcGC?j&37hRoAv3u_`J#Q3Qf_CD2)h=7y@qQY{4;k z_=osoa+)_05q*-Jp+uqUX$xBL0EnG^*07J06}y#R_B$1MD_s6@rW4&-&oM?}cnzG{ z{>4_DMNTdXxssiU$zr`rysr`ek~KXF{Ysm!0mkL|7d>AQ(8_;$u^$DYm1C;CA z)9QzcgzrTz-qw_&5Ti~PI|)!4gYnTKu&{z+f?^HR4=7URM%Aej(RZ#emrbpr5N%d! zwgqj(?@@N_YxkB`lUAPw@u1M7DMn^n>IGxz*DI(bqunoa#y><8APQ83gmVc7fn%jt z7KTTC6eA9GI7c^*q&>>E>BCP>1P)&zu4jx7z_fb;M~n@H%-64j2_;m+QF2f%(Dt!& zs8c-1!pH03rjdS|M?4;=*zrTQFfmkj{A+q67*pr5h=4jojuYLOf%w`@Y$Jqf9GyC0 zD8hAEJyy=Ju@b%QRd)%o8+}ge$6(KA2ac?r@e7l80d<99P?iAYmY*yXr#FtRNkky>GrUNrTsYk!~o z+S=u8-0nUUnd&FusfzU~SOZXaC}dbkg{_J9>|V%yAxm@zbn3w{Te62a=J1F>SiQ=? 
zx+HlMg@hiYn0OQk_>CI;On32v$2G`kY-G}1;$32AipJGif22?37F#8s*NBhavhnhJ z0&c^1KXcjOhWdVvsys%>mPq;hzy&P~e243h#cw<&0p;7VnCW|rhiytDR?i6H5$*c% zd}NeOJI4x=!}|HZoXX2A+KUTXBKn+(R4-hZmc(Sv{2W)lmBdauzoz3BzWm6WhcsMs zr%vaGv$i%^nu2#ASvfJnOqQJ(>&?6ZCQuLE$s8|(d3JW@E~2i)rkz|}JqrI{($C28 zIlu5NOKy-mzBNV19T{}3Ee^>z1%ZiVqVQol$Vyz9LS*OYdq7o0 z-$ER-iL%dSarkpOWtjT{G zWuClwVh^Ry&BZ#fu8DdLLMQ)J6Z}AeGR`L}ZB+Z~uy~A(W&*IpUZl|TEwZj&B;Cd3 zJ2ei%TVeQ}(7}V0;>Q8Y$qj9^a0W(2#ygP>*5T2#hMT zKB!M(o+xoBj^$pwcCGFuM4s`fnUFf^#KjEbm$AxR!f5<^oUu7qW3#dH3vyRI12i(a zAFJmyJ5s0;1q5joqr>BK2amI3npa=f1kk*;9EWdfR|5VkO;KFYq`M6X4D{4?>2wY9zCje#R|4Q zq4qf)+#kGNKPBqLp}~XtdhohCJj$Bl*G(nq$-RJDWDYmTGCjrn)Z>`AxE89WblT87 zb%_mHyCrS(SB3KWf}X$2KrZrt#HmfR z69m4;p^9^yBO1NVde2tL1R^MXuC{s?WDlb8H=E5N&2INWTKkEUF|Ti#?HNHk0NpK(WE-vp?FP@ErGR()QX$gaWaQ?i_v%|NNw-R4fjrmCk=6Uq6 zZoTFHbwM1eDu&|g6^<%;xH73KFV;)Mhr`G77hM;~7UUhtucE_GWKS8n3}XeS#lPZj z$zEW~Q5mg}K zJV)8Un@{`x#8>lIG`_8FwL97vX-c|{s2QZA8}nhiBJ^s__2n7DXpg%~unW<3%rQ`8 zwgBX6&^VP&jy%d4`dP*HtJ^DiPMi8fv@J>#EgKFX_ER6`W>(w~ZDEBKBiiPM@h!#8 za{AajRZcHBV_f2gOoP6T4u|> zvE&`77$eWHQ@?l2fP}eGZ!g+ z+MGZD0xeN}A9JDc(eiz*ldXR0a>;X}t5fZd5<6(Nm(E8yIhS@ESOJ9i#^*6?T2vgD zY3eGj%trf4q_g6R=IZG&STWwSdi>0@9Tt>)X%%gQK zsnJ}Mc8AeQoq=raK8G~OWp;YPwi21yryUb+WCl+^eDlk~eCkUZ?YXaq8TYsiCaa@^ zc*nYeq8yyQ+){!YOg>?CZ;^e7<3r1>>qLa+*5z9=8dcXIH;Z?C*^Rvz(QEyi@0o4!!AY9|YG^2}Xs#mb2AVe3j6w-Hx}Xk;FWIf_S;U4x_$cH9+t`Dwi<;^Y zqU-xv_n;?Q$jsg4QW_18A-{>M3AvP)?W11H=5IHS)K7I9Y1)3`F*Ze0EfV{o(l)S> zi1G5G38TeHOaZq@W^-Mt~nv<3i#ydW3YgpTsq32k% z&WuPG9}#$*wuU~szyf*^QLhp3@D5B+dGbj{=9yF)U{RR3YIbuIXzwxz`z2pi(#PkB zgt$+YxjD~B#Re->cZ+kTR5#~$Ek#TYhHX2BfnU}`{U*qsZ+*nF@lNmW!G(RtA3c+t5t7KA5x3HFLRX6az){!yn8Kha^AC ze>RK?oBH4^k=iV#?SX#BnfFL;;-@^G31JDr>4%^kots07Qh95UvR zq|gfxuvPbOT97(jJXN*jK0N4Zx!crBDN5_LsXn*_R49s3M0^CYRK)RPgKUCoAaF_A zFdh`y?fRzcdWwM`o_PqO&nQ8TkMP2TYq{bw5HcRaKH%Gs4zcx^94kh=XbVOL8@)8^ zYv(U-c`@%1zeywwx*qFaw@#WR8b!S-*0Bd-BK-;|d9Z)F$;f1&Ev@nj?j>kw%2;=e zFG1g@FslW<%24P;49Noxm$=L?h#UN6u{>SK{>Z!P_^zTzd)H#5^h<;ce_wwTJ^r=C zoP1bvD;Wa_6eLZuQDs%`ERH9w@cNAo)xn6sw+*YF>TOGsl);lsfp0ikk)f`SVN?MH zi)(;AFU@H0K7WD`|Il5_30Jwnb$gU8gsOaH^sG3KVxAQ=w;Z)sy&o|}l9CV7b=>bM z>9RLFD8zpmB{P^f#O&oYXFu#CN;+!{p~EY$cpe{CyEGZ=`Sznm>`{e=T0qZ>B1`?Q z0}GP#q)Dh>hRARwa3GmW4SWaJ^OU8w*vi+ulSZU zg&4B#b|)-Z`|6X+adBi_al!lGpoX#g<5~*aoRDt3(`s5i(81hKJDR<4JyyX|6Sy>9 zCg&pBKAx-XH_bXR^z`_;j)++SUW72})4C2fw)9Rmh1os*Q_Y>vl1*{7oJS~WT{p;< zy!bj4%+Xfs*l?WEtW3f}bA=~%7GO}!x14P1^GZ0f#W=yP0hZ(!mXsLmvHrU#T*7vb zuN!d#qG}oB|c4bq(@&e;5Kg zBZsA1GPBo12>u(1*O46PuV0t^l$0JYnx+IQa!Xecst*X79XcR=k%;O9eEli?B$8U+ zk7LJ#MJD41?-bfA{nJQqYP$|RgYV#8S9*fNuxGfu0Z$O2Oh&)vyS$~#wlClC{Z1p$ z$Wt5S{uYVSWF!zpIu(re8-l7Hrs;5#q{PAF*cxnuvI!?NkS)5BXEn)nl&5pwam`AR zXBi2D_NjOVL#N9W#|U3%nvGawwd#4=l~VpVWl9L#5IvDO7j2?HNH#D4NE|-&PrJYN z8%^iYY|rF+&gIX}VrS+dOOe~3Ve_3TWK=`-vI{Hh&~|lb29G$ae;Htt@_8N)76Rgj zv}j@l#q%2s&k#e>q`8d238MN?!yUWk{-6Z0Z?V=Qn3U&2B`sKxss|7gT&)!`aoI?{ zNO7Rq(TSB#2|8w6IlB0^Ca|mlWz35Ga`=_gD4Ka`(Z%OEfO7BW*M}0CTgqV_$p$i9 zwSf*H6hw&foEkW`t4OS{_wup_BAib=anc5{{cN0qtkigfEc`=fHL|g6yJ6$&An2In zNUEGKDe>y80J&I&3@MT^h3HUJlDv-dh%9xZ6H2nw!H;ed?BoP#^?!~1bng>3a{KrV z>w5t8oGvLr=$R(~jS782N;7{+FjW_(Uy>r11WlmG=GiXRH7<43G9mOP4!Jo38J40f z)5_as^h()W(wp?r_gm9_X( zLe%|aZ3+dc2811g(>W`dY+L;YiYHW~L_O=eEDR>SkS!7AbI$QY;eEOHov3)0ccowJ zpL#*7pp%p%s@*2Sd5au^1h($KSa~1W(@&srkI7o!5+o7uhSws&6S!|pc?Mc6YV&E{ z0EQER^1e^sypv||ETAdWo1f$Sav`$HD`n3TI;n$fF~_wGaQuP>LJ_sg>pNO`*w$3Pc|2wf3}o@ z-m717o^wn&eyGu6m^-%ou}Aj9^RT8DRX%DTpPNET(C1JasaL#Y;pT8b2_P$S;rnNc zGdj)jkofc75h>r-o+N}@G!=$xxCq04L>H&2CT2I@=dJxNLWbbr0O{%1=hR~Ct5j^W 
zgLKtn8bRQguBgO;k;0}I*_=LcC_oj*0wlsIn!$5Jxx53y=+a=Gm-W}LFS|z)P~$I8r?VtM(UenGUy%-Z?B36lAYZjw~V_} zg<68J_~+3sp4>_5LFqjWU2P$DHUpGZh5DzIokym5h~&ktyKE#6Hwfb zzMZWugF2G5{;~o;vQV8&>BZT%@C@c2OkB;E%w^kE62#>Sl66LS25L%v;$}sA5`p8& zcA-GTJ*Qs&Lcoc+RlXf~C+lXiJBY4$J&prq+)Dd=moQXnyc8}zg~GgK>Xd0R_qi8C!R-)Dt_L2R4B1%;^+Vd8_1S0tPSJ1cl_5uA)I zVg(%}40UvE7P0L-;$J3Nwt&9LL#mW+P~6uF(xgf$Y%lQh!F%-{rUSk{OWZ;2~%?*Zw7GaM{TqD;xW0?-Ql~>C%bP1gTh%r4naZtyW-E5 ze7)aM^~7)n8$*ddesgyxy;9d3wY?v;x{RI}yKeY8_C#Qt;Fi+JJpgLX#b$2Hv8ybX z&>aBsevR1OO?whSVri16eTfEmf*90X%=Q@QRs>w>_4wUvo-}K^8!#^~AultL)zh?i z=@?%nJHv9yVGM@_@%fw#)JFU1nn_Q6qz`pcmc(l=Bt5~-$rrAUbeP=Od?4=zL}iQ` zwVuk>K=3?zhJyWF(#Q!qX|p~gJkCd`2~j)uwrPgJ(O{A-vC2vVx6Fs@>fV>u#I6_u z0mD-aoyq#hDzwDp?|nytFAfEmu}6X&1#T_ z_GUR+h#?(|oDkH;Pm-}m_$bun+Y3lK!w>C5l$9hIn~TVAqjLRUiM}^R!nDJcaj5I zA26_S@eVQ14GKZ8K*3%~bMmp)z1XRaBHeL~-5k8B|a z3(TeV?cgzsze|<%$o)l*BR>&?MJ3CFX}hKZZAS=%R4VG9s*}cFMgPQ}FLz&H+#*IRSO^3pmua%pLRU_$kKLSJfsOgRP2fq=h8SdRo;z#ps{t zOgxnAF5rz*p=F^6;Re~-7lsX~NuL&7#EMVLizq6SJxh&Zf_Vv02EJ|C_o+{dh-*Sy z*FcSNo)1kH6(JqklP);;!d^|t95#>_^Iinmnu1*)#Mb=8pb0VCNfM6MfpQoQ)qttS zT#=-ld5Wd6g;E^lcY(VO$PC_M-SbSy#tPEJaz}C3o}_H!4M1y9|DJM!dRD%z3_5n- z#ZO0k(4K5Wqo_O;NW$o-d)esW(NirTkM6E9+iFI@z{GOG?lrN)uTdUV9}YBYLz}09 z)gCbuN1*;n%Fhw!TnTgr9m|1Pk0g;Tm|&bEa9>7Ws6YG8!|XSF?7ZW|7fbcKu@CB* z<3{_g%ladMdY}P8k?kOv%Qh;GPAAHursF`!cK?>jXdj888Csxe^&;{PgG!zoem{&v zPj(c=Wk&ny(;O{kKBl>AGsYeo!m!+9=$NouvgtesfZw6X6 z7sGYbOz-TK1kWEpZevDCLnMw$wj}w~`j}BrsJec?3t$9|OUsgxCr?pplZhj*VLZ2j z%I1GkxR`k_!#tDsrfz6O`NC7(s@YT#EfMKVZDnh=FT#F{vyp(Ge26>dRIIJZa*a_b zYw@(MTEnw}LD^RlOrvMcx4{h|y<3Le+X5sq}xKQ)=3 zTv>uo+LT?oLc<`o>$m&G=0MZ^rv1_3mi$_j`65R4W?B{JhvauL^M&dglH?8~ z@^l#@S6^w6?dfRlTP^DoLrz`ik+GF!NK+e{;_(Ui1{1%X_9WsMSa9hN)z~Pa5tCZb_lrbMo##G=rd19bKuQop`AvsJ zIyHm=$v`B4itA6JLr<-s3uaL24|y^XWGP=F3E?G(Nw@E7Dg%X9K}>|M>Qx#_%3Ny; zF^aMCPJg*3~{JAt=FCjfK7}= znyz^1h*0$zQR~K}y(Yp!;IwMpqveGE<`5f=$6i}qy4)FD(vkz>4Kob+*+<(&hxyrP zx=rS2)>7l-hVGIuCq}Jho)qZHTMAWDFrdhg7`3@{+!P~4*Qu=nBcQUfnJU~ckKsm+H^f$)x0pzPuRAu)1+e2Pb5;$KX=Gb zJ4X~FZ#eHMJ)@j44YH43MVThZaHi@h=_?pjl#w z*s(6(d;KDb%-m6-j65Y5&s_9LbsB&^DW30M*yaAH(ygLTcW34QKCtjD$qs_ z-kJs9<>N?iJz?HzcSwL_t`fTSue_7hQ2LLUUNYeF0JC}8eAl6~M)&j>e$W}oChoKr zRQfR+`KsHD_}X!3T5a1f{);${fc(L>{EPmURqxld%H<@%!G5KUPP{Kmy^f;Zj&xt_ zo=bs%4k7JI1Lt0yCSc^0}@0@oX#j;#y4-g>&@H z=TmqGTQC__E53UgMZrrN^u#k`0`LUW*Ck1nYqF0NYUsxB=bged8njHQION9P=?FIN zJ)oZq5=1j%8Mt(R7K}owy*Wq*d}zN*3w1;4`1_qu-q?lOgB&eV`?&_!O1$)~_c;8z zHAwtf8JJ!6C-ZA^(6LSQR?g#w7pqLPdU-t|xbLvtlJ9?bU8_j9^D06md@=Y!=uHo! 
z?B;#Q@K9TCnmj78s^h#)Su7W--!E>|pxXyV_u)j8*I?AAPM~=KFRlXY3!x;<-Z{Z& zI)4iiFHq*Q6RZ}g!qp0dV%Bi|)E$l)wPJ`e`s+=Tou&6F3#{S;W=k^w99p2Mqo4;_ zx^Qu-1RIr<3i#00-Dq3egE=vMLlALQB=PCn@=4~cqd-KapzWHF0y?DS!@wxFNMY}))6Io!?5BPo zsh83G2UHovnpqz!#~{bHI@xSB6;c(>P7?;cCUu4&yk}SZf}TgQgZRs~-6;0&ODCov zFG~4=n9x1VCh>VCuZI4R&bDvEZ_iVE7FrB3W636-aRa;lJKHg?(;j+M$N%gC zdT<(I1t+@I4Oq_-n!hD5$8NK)|an`6^Bh!2?kBnxY)o_q8Ckl(l2=qBt zxP=g=vwW-IeUDa|!za+Hc?WoD2zSI5V%6*yxmO@3Gi|bjHlMRq(@Ef(?NSxd6YFI$ z0ySKAw@S&Z2(Lkw99L&Kjrk>F?6MOSPL4CkbpgR}T)42!Rwe65biZJd?mUbhK$-3N-Jc(GLA z^WfHS4r$;S_u$f5S9D~7bousM+WprH=p!HQb-GND%x!Dv`#m8T_2KC0(eN!x8c5M{ zK0s_tV=?JXE{c`Xa^3%$cPU2d#An@{O>g9`2W6x=UAd*$X$S=s34{mlWxoU+s}ZfW=eucT`=#P)5j$F27a_#h4K7;H1qqiP{Yl#&5atSPM*3q2nQ^_~^z?%y9% zt0i>!AP2U`EV8sU_fd7f1)_!_)+^Ob6Ov`@o@(o3+K{CeorS&g1Ui$wLPErR8cpqM zBFi~w;dkj>TR#ma)r3If7jSIcL?@i*8HLIjG}ytWNv*kS`t>?c;VsPy%d ztT{S}ylg|}xDH%rt=B};KIq^BpM*7&YzKGNAggKg=#i1|MKF&hrk(6Eu9YiLffj3;2Ieoh9RZyZMZxZpBD{ve3dH*(nVZ*AU;ncrDV_*?>f7j z;jr9s@eSi_4x{s~e6KzI1{9zVD*VEA;&i-aiY0GOE27 z5Zv0hN;D^<*)=5PWBoawjiY`zqGlrP$^#9f2q?WXU3|`is&(FK$#!!+%QF-sltzZn zOLx{MOpRUcyq$U|HYLjZq!lky+S#8{ARn01b6$U40If@4PFiTHrFfTQGc}KD{Ji^) ziILm4T&9NJO)tKDENII>M9-+?lGR-R2iT&Q!#>8)FbwK+)|ZWtdj~u}z^ZKsME2vp zzJ@){`_ss6J){&b-!MDW4}o07mRaXm2sNUql}X}KfwnHcrHuA`gLr9c6iE+%PG02R zmCvQ8P`%uUUOq-_wg`A+1Yk>dNt_0W;!~ozm~A-M9XQGJg~$@`$AXI*5T+j5HkFm& z-wj#no!t)I$uO@*Ru`qp?WPAo9r$LHN3*3B+f~RGb-eXv#zTf$nBWE%Zs%7V8>!pe z#Od}_@6yH_ZhIBza?-;)_4hCFZQWct8=nUd5%f=jQPK-|xGjj(@|& z(HLt>K(uMm79dqH-K#s$(n(WtrHyfoY0imb=tC_DvMr%{SuL5rcXa{Yx!hx{Pw}I! zRDV)G$6~#%5o?9X^WBM@=UYV5cJzS*Y}vQR#{A<)N|3i>bOUb;zGjPKpTGJ*(1iuL zV(OW~QocC2>?lQ0f0_k^IlH4uYH_Hr?12lFr*%Ye@9i%>zn33iMD(3_7n6W>e#VnX z!w90B8I@*&xg%(G5@~;H2=Omg*O^p;6XUj;FoHqTz6GGS+~6|{M~stli6=WQNM~+& zb-2wW`V2L7g!3s#?(!B$zkAY7*Ei(nhU$~Dm}Up!$Cm8#`x|g{FIMZ-M_NC*K50QZ z5@)KNSA_!t83VZe9dunz%|@)BGGBRR=iTgXT<9Pvc<i7XB^w5HavNe(1$ZL1ZwM^+U)Kc}mBd zq9g*;ZMcP?{u$%gqdH>bv<(7eZ(2yk=A|%U9oMu=E7K=IA9Nl(taj>Pn-(Hj|>lc$Wio8bg%| zBD(B_+YAhy(*}}fVuVJG-n*3rb1vAK-QTrnDr-ty)+|o%oeeqdFV=gMss~y3)vMJl zw^~p)+O2Rp0)NJyvDDywgnIGL> zT3b?D&!*X!?d|j+8@b%F$f;Rr^D~X)KNh4stgB4N5Ic-xeT`K~eBqgfz)(_o4!p0% zcnzLdeb~*yoHs#HNdY`)uKhk3Eg4BbnKI|<_ylR8YV`F7F6ga0X&TUGwJH7*MD2iB zh{ESNv;a3yO2PfW1YSSk}j%5f&q(-Z1{9 zqCj0pRJ%kmIaavk{G0vzu(sPa;-8c$StVA*n%z?L4ivPXODi_sz2GCqbb;1rM++E1 z2H0nR=du`2Lpf>xg3J!g`>U=~azK&s#?$WgCrs_5)t;$%bz9pi0DYh|^+zmuz zx#eS_^K~Z71`VT}HGMz+9=%_7@fq-sg`9@ZwHYd9q<8LFxnw&$l=O;5$XKa)a?j=p zWUTXhEh-OWN8g7pN_2}DuTswwR)v602y3BqX%(|mSv-;7$PVHJK|+rp_7o3u8qp&9 zHZpjjIq{0k+xnQJ0aLxe<9d#jXIW}lhF*ru8Q?LOSk(MUTaJ6l@gS$Jt$q3@l^HyeXRd+VbEb6#T5rQ?4Z|2t$ zwIS`&g2;xSV9@AAF^XYQFMHge?LJnhcHb)escHh2Mc>X<4KjNeO}72!LcthDa&=HB?0XB?ZYnxk1RCn%)P*gAtE z48lbRG3A<&meYdV@oI@ zR-Yar6o@doP(4Y4jDgDE%wTAsA%Y|>ebxUqeg6$R+0#jQ;DGAgMD|ZlycIgW*+dPZ6qee4iD%c z?Rd8ui4HHt*BOEoMWCJ~;M?Y*>eG;@$|J*@(e?QG~AeDL!+HZq9St%k|)#H4RfP?=^eC)n#~2`=eQsl{uHMT7Yg@_?of;jZ6{2oOgR~~tCPca_>xZi8u3UEetJg@G%tzi6NVBS zZa3Md&4-o-j@2VZwDp0SE@R-$BcgXqR}tyo~~t&XnlA|AOlU$c>7wYOs9 zW!t~HR+|3T&M;*@@_V2psM+;*Y08)67q7(+>K43Ys}gy6(f;wGn`XZz_o=(2L^Zvy zbrsU@Uezn)fGp}F5myLRq_C%QqknK`&mz#&6jBn?>^x; z_vSfZS+MZm?&y6zyR(@-^aHD{>eD$rd5mvfnSZXeS}B>&yd8M%%kGUjr;C{r^7qHB zzvOxL#|inCJ5#=NhI(p56qh~_+Tl<;o8fPP``*<@7nJ;MGOnwZEqD26^I=OzP1uv~ z(mXP4+=>Rf&)zwFAXTZTe5T&D>Z+YNX49E!w%=c9;=1O)OM=CQLpk+-ncs;#OA@SP zRCZan`+?D&|B37Bt1f+y*0Sl81_l?yKi7%}H`jcUEzm1S3-w((?W093$8`MyY5U_j zGd@)PoOOT8eO8;eo*7}T)6&iNyX8NAy36q7J*U0x-c!E_6vW%@%GT&+2foG=T-vq zafr+C1O`fr@E{}uL=a1zFzH{fkxy~`ZZvAx1UFXp0tk=FvujNYcjj203 zK_nplf#2VIN1pzyVX4@=dCICu;Tw1k8zhBoDP!*Y%AKQW$UObmlQqC75_|n}k0=Xc 
z)nl3URm|N#|Cc476HsQ-_#mnG`0(-rA%Z^-E%~86Q;${W-P0W_?%ya&Klj0=Y4^#Q zZ|dz>_X$i4Y2!6yJq=vuQtz5={b2op2|fEyg~x5&wFKDv^nD$9*Z?fa+jWg0=G5HS z#yH8WU$1vOShX0`%-Jx{geMr}tV=60beVJb-*kI_pL#5!c5;EA;dWrVyKi%xF+{

xU_YY?u-Mp_x;H^$S%PF+;*{n~TP6M%yT@}rpR`X&FYUe#|El>7tp)Q! zf(tw!?|v+FpuUw`)%fE7%0=O8?yUyqsFNx=gK0z8MBRVzopr0Dw^j8UO$Q literal 0 HcmV?d00001 diff --git a/augur/static/img/auggie_shrug.png b/augur/static/img/auggie_shrug.png new file mode 100644 index 0000000000000000000000000000000000000000..f53cac1897f2dcc56bb375ffb4807303e613dfcb GIT binary patch literal 29646 zcmV+3Kq0@0P) zaB^>EX>4U6ba`-PAZ2)IW&i+q+O3>vk{maZh5zFea|G*T%hyQvl(dE*59kqJ?%Oj6C`9t@g|Hk`0`22qV{8zt! z3V-~$v-*8OGRh%Ki|XT=jHb|{J2I~fBd=Y-+zhy`$0dh{QbbBD@XodKQEG> z$Is70{(jK-zMziYeqQQ|pYQwo!nc&~uWj$6efVBXe|JCsAh)`S*J-{b;>V(*rO2~Ggs=aek;nmAJNf!c;O=C z>AkJ;)%aETb9-NdukNScr2P2p7hgM!5Q+YEA%_!YxWV4{9X3nM(PHC!jGN@>XFatz z;!es_)>pW(rIAkRBx^@=De>d@+gifA?s(VRp>yREcxeoLSl}&x`Q`rZAOD*#ckfn+ zf`VzESh23Ch_ei3PQUXi5)$q=Z{-E}$JaOg{-?rHF=t+;Fn4Zn`t|vURl@JJm0q3` zuPc22v`}#Ed_RB?aqYrlLLvjchEzfgzQx!=AdZ~`O;#RLj*AS0QsQBeF{f0cYq3XN zoAlSQ9lNoNJL4Me(GdslB}Ey_0w{vhGt16mr`nJrMoi9tf}T&YOSpbZrkiiM^|ssZxbwHHeY5&+ zU;jnc!Z&O2WlFDWzh#Y|y4HStL=c=5<&2EQ9LRW61}NyLocSJdj>?>J=0~I{N@S5m zxp5~bW27)!h~HjEi@jqqGDRuuJnRBG>-}Cm1tR3;(ei{38p6BAjIXA=wtJPaFVxR-hThVEI_1soH z;j|kzFO74BZdNI@5(i;h#T?fPW%2SE32-7TsU2R*WpWwuj(Nt-F}FPvSBk5156x1k zYqXrFb)r|+adS5EeXT@m*KW_drc0&qH^2m#AFIZGN@yqK8cFYtrFYQBdbcndMKg~z zVzf4_+ehN9eH1&x>;5;9OFf9FxoAJ<&Qqz{Rs!qK$LczBpKX*33mI;AB0;H_P^g#1 zeNh~Rnzl~7tryy5>`hltg159$h+UbX!9K}SqBdje`*Le6HtBMsguI58v!~^LYoq|m zJ!9-MY!UV{2XEeE*$7SVIAyec+HPT1HwbFeEm`qRi(4c&(%LqE+z_RqRgP&hugS}Z z&t3UkKIDD8U-LNYfbX9Eu9NDxvAzf$iswbD6iR9Ow)47v(PuN$a0gf^-2GVn5|`sI z2$}btQhJvZ%nPUYYVlOAlFv#?a5}rx5q3^Ckxd&uU2)$E1kT2#7L9UUW)U<~&3Uxd zZ0t11|Ds&(vNXr7;f_O$>jY$AXKrz?RCb-J^~!CtLIbuAGUCuYR`};bU0yzg z1uJp|ylJ%M&Ls@Qfl*S9>L90-#eD_UQ=w+KU`b;FZsK)tm;i!CT(pO(l5e|)TKCY$ zLZmkxSK6HW`kAWTIRG3I-d1Qkb(+v-A@ba-(Ss>?D$aP5uzAZqVvL=6r0D4xNz?_L|$@Sd2_omb??=Z?A z9nOodTUQAMHo`K3N;ObTR2(v`5-JqMQD;&Lq3&D(r9I{5B26 zRu`!SpM%ycGl)P4!YJF9YF{cJ5;by}WoHCi(cSwTcQ$DJ9;k6l2QibS#v&F))}qZ- zF*?^*$Xa3`fmRVz9R>Jq0?e`z19T^q2p`y>i9&Q}asUn{+;MgUE{p{7P>s3nD{@Ja z>B|Fa_|-ryb&>`s@RhZcHG?FjTd^TtUfreew{b?`fFpF7rFVh3LoM`0=?iprWp3IK zda2+RfhkZ&P6R#AJ3}A5_ZoBt8p6E2A!&z)mFb*I$(mH}Dfv1=O4a&H>JnN4^AKr7 zoFHUd(;DaWS5_bsSnN>%3B-=z3?Rupsqo6S;>O|cBo@ik`~hg`bJ~*YLQ$Un`rJ11 zY5sL1q9PDlmY%ercL!KD2ZRB(b7|Iy<^h{1n4bNlc{~WSd%2gY!yb*%LF_h??=B%f zxKG#z=B)z8D!EI++sz$lBhj~`8QZZnRAwE#S7jxXK6SkC0(o^unNt)S$SE7Gw`QZ& z7Id0KWuTcs%D{M*6(s!wC8wnQbYpvD4wk%9!};`03a30CTv;&E$@9FhBpPNO;w-KdDx8^nCDrtG&lepkj+SJ+c{iA_38RfG z^6~C&il)tl21io>=L^w!Q&`e0#F;?!34j$a26`Jiz|kxdvZ?z#(1o2tcmP2dpib`cf^ds! z`zQhuOQ9fsOkLQ^}YpUX@3hRsxav5 z(wt!03KgK?3E)9EPYL6xG0?BSqq)xUbw@xCU>Ll;q1<6wgfKN5Xuy55A)|mk52Y-G z6T%)}um&ZHKyCP#hJ5;b=7{~bi(nC4PUQC3?!2zo=(#B`mY7wnMJ3$S$E<{t*j*IL zTo5~|wh#PM$e>t|a(C3_n3+onwf$$<)xEd^c^c5;NcBjMAil3h0anoRq8{NldMA6K z9wPBNWE_b+6w^otI|CvklLe2w*x|e{(WiPUqTi9robNL=bPZYqw zM`Y^hk$(Efmt;}G0nyPD=!7iypn-whNI!%tYhZyUnTAaD2~0;t8&a{WO~K(oHWr!* z$wmDV7DR6*ZY;*AOeFtBAKl)$6b*r)qpT}YW*O)p6JFpSq+l9VoNW;oz^4?yHkJ#s zlc9bVJC55pl)EJ`hl(Os(?YSL1@C~s2l7!)a;8{Ph!K;eaT4kdjS3J#aR}y_fZCI5 zC^Fp$1e!h5h*%6Vj+Bl0gI}!HDvCA$6RUEcm|sc!fvW*%P{WRfnFpMrH4okxlh@Kh z(&u4u1KRL2o~7beRP>Tsi~B@Ust~Y`)DD^+p;gogQ2B&X5DkOWb&MP|0M()%9uOvB zFObf)$Ur6T1A+$Y=-)s`XEV$(tTxX?1&yPwLQ_rTBE|HvwiCWas1Q$?o|JyL)-ou4 zDt!g)VDj!nDd^Uv?_mhM=#F3kvE^<$E)VL-a@vwFS0zQE@@ERV*n%N&eVE+nJmOA` zTR=EI6c)h{P}dVrg*~n1{WV;NU!X|HN|Fwx z`J57j=MU-uXHu~@Z2{{bEIB&^)s>%}c350e0u=#3nsx~oCTRRzeY*UkE=eyym{clm z=fZFD;jxCX7P=qH7pFT3Q5^7M$hk@#d2kZBvX>9=)821rIpwM4E211A`Fx`a%zV!u z|C_rj$PpUaw8p<$|I!-kXE!CG9vLFyaGi5YVe^e3qzfSMu$UqSZ;-EmKmlvY&B1oS!xk4iZbptvGF8@~o#>E7)? 
z>;t^$ow~={L9P{iqS?X2qLC^Bh;&4)XH_3zR zT!IwCz5)w~o=Sl~)FBmxsz$QS>S*jfxm*=mf2UydvSvw(}PYBci z5)xAFNtU7J)*bQ%#e&~iZzRPnU`~wEpV|ZgIPjHg$w1Md@(AQgb0FRSL{4z`anIP? zz&#^tOO>QH1Qz6=wzf$BV@m)s`9K<1IvfrBSc}Tp-C*)Jl{%D}ODNR@Oi=bHl8|7e z1b!C{w6ItxD#RT8i5mfhCMXKE`q`6=D>0aFCt+?{1Tf*YF3NA7r$DGXS$M)uV>Jow zt=S8f1oMal#tYMsz&jFxcDI3ZK>BH;LRANh14ie59E2|lM$u&Fl~1LOm!_GImNYrd zs%+FC8XU$r4*qDu?9)1NXIbT$x>|b(;1bl(N=n@(!n zOMkW^r)J0*XaMp7C>!G9xmgsr{iW^0T;K~_M7h9LDRtx;>JMGq=~M)x7&^!(Y{?hR zHfKjZ(Fw2t;JSOOfDH?qq}>5Fc<_Q|q0tr^1QXjIX?3Ix46X4i<`P!c^emzjG##vA zRk%UBcI0KR#;pMh{>^z8>@~|7^1|d~@kZ*WCZd`U>_7*cf<4bm76Z1N*OP>RNb}JV z&9lgW>K4IoS?|#>Z5diAL4Dbm4uGrSF6l=rlB7oMQk>O8|CiTp7YPPLjT;BRH0{HN zH4W^j%H;={=0hlL6fEjF9L>-X0g1-BnY=V)^#&-N#&L+tAEKXb|HB*z-0}i<>y5|;smpqLRc zi+=%N_!pe^zHkrhNq01dsgkJexnb|XE?JiXfl#|7v(KmH=bJ+O$SCpy5Adg+b2Ob5 zU>;9<0{kbmp|Ei#v;Yu81d-}jrU9F#f<*Lie<&F0h#bU=LYNKA?k}J*M1=X-*i3o) zyPBn}!PoMba7DF~?Ve}(*!ZR$Hu<_Ke^;^VtDvJYFs{)n5#WQdsn~ouAA||=AK$eB z%o9;4p*JHHbx82jTu=pdnX8Mu8S5%&d-K!%*~y;Jlv_}L{TQih76qjz3+}dEzs71>frv0Wc$K*OTEFgkQS&8 z)gO!^Qn6_}1~_8T_(vE9lZ#a)uF!?Hkp}CVGU?jgfn38=Kd9P^-n^hbl_+f>9gFdU z_hst+)nx2G-iesLO$7{uiM;=#0ihAia;oEg5uWrN#sC~eEl!X3yr>cWem$)-7VDw= zwbPTK;o`MfzJ?|x)aFHO?cflohtwh)&^MR-% z(sTz=%stNHfKssTGemDPYOqH=Fe6Qa<2;lerm>5)DSAk#@Jm9hM(ix2fxuaNW1m2i zaZ#iSE9DJ)b|6awQOJSLeVk`VKLo6Jgx21e|G2cVRkA7r0&&8SOrRQi5z9)i#t=&xc|4l^pn-Y8ioCF+N3dGQWPV)Nuqz&^cI`YB4HIjmf=d+NYepvIjrBK% zM8ZoJ^h<y^Q5XOhWrCTg17QLSQ0jxMHH`vTuRRvg)Bsw9Vs6(Ql*?`H zEY30tSHV!3!-80ge_}pk!A2Z<%$E81c5+%(ARnHi+;p`Z+Ua8u4=wU9%;h- zvDJ$>(gBWDo<5k~~&pvL8q$dxo*aoX06aB0wJl3b)po`7NQMB88mzs+B96nwvj zenm(JD4Jeq+zKoS-csu5BP4o4x`iXD?O$B5jb#!wf!q8+#`aKoe|X3}_A+eWYi z1?8)BmCF!w#r!t8Frz|Bg5%Mth;Yq7JBviqqn$2jk1Elm9Ki3zG>ohR0@ZNQfn6P` z9kYZh@)r8D_%I!I!k56Zw|vHj++e^!%e4;xAP@LQ?jEVtz1tXZuPw)LFsH9MbD?%n?B&_v{_9=1Ogf^;sZ?%01?6Uz^?H|>teD%2HR-Q|0DP$hhT^LDgdK< zn)HR{JDN5i-2vRBX{?K?MH-SY+^K?k`}FwF19Pu{5?4bF2YdzD!bMQDE1$t&$`;i^ z32CF5e)wxaOZFy9+ikdk=NdZcF%Xr<5_YxV{Eqe*m-Yy-#F|rWYWhHz6adB;iKaO@ zeZ4enq&yt$mme}yg-5d_9gm>yz@H=zsd5D!3c@UEI!^CW-qD&4USNQ`Hh!rww_0l2 zRI(rx@d}(5Dvm=1UD|qNC@f|$HRs*7n=goQRq$mZ2%BBOOn` zfp0_zl1gVIkGA4LN}_{O1j%U1HMQv#j`;IXNIHOWBkT+wMSK{&)~w4zVsOVkI?W=V zxr*0L7K{Oc>s-%3vV!DT8I)|*AaVh80FZ$QC~%!XI5>SSf^4E0kj-f2#eKlE7mX^h zKNql!-{A&4cCO1TY2J1}n;K9$_2Az518mRVZDIW3djzJ3MhGN%FWJG>TOLlwfbj8H z8xXK*cT!skAUlPp(Gf_HL5CBTy+N&M%`)6g6^&+gFh=wW~#c>%DLoU)Dt!IgoaAY+vn|O>WaK#UNu2K89_CAQa)0*TvMT!1qJYg;u zd4(~Gw^uW?Lj5d)78YafzW05`!!`Adq?okizbCRDH>2JK>G{%4eE)^Ig=ojIY+ zG{RYlDiFjcpAXtB(}2a1cBqQ_T|J;gaM3dU+P7V&Z}9x`Bb{-g1)41M)F@=o7_A>& z=R!9~Zl^K6=`4|nd=V=%pspic61FWGits7W6_B-bu7xVN#BGW)f=<^ZFGZqREzl!L z#0~2#7cPU3Kzk0z2hDRdx{MS)B8hs{=?v}jSf8!Y)9T?(VX5 zFlE2GZXU-3lc?H2Or*L>K|<{wBs@#2X%(Hgn^!&)M~9UX6QlfnlAzJFZs=tE;X^V6 zSW$y#3tgd-^u{wR&QNa|e*oyn^-t{K(HSGG=tr;$8q4V!3HfvgA}ecd2Tnn>tpFqe zoCa?;f7WbQha{q+L^-26U8F^HUQ?SupxTly$^9TUbzGC^q_O`Aa=L#6v2zp+R1lJr zzJsLaj)z*oO&5~Ybz+>A+Z*(40O@Kk&ueQJi$1d_EQwG?s+|hJ2O`~YEg&;mNi2y< z`;w&tM^Amgsq<0w40WN@NB0 zco zgo*LCqQ6xn(wq%xD>0VAm5A7(ozW`O{o~V1e>$Q-w}z^^o!EV53a(HB?yM5Zmnq!Z3%K+-zq zNa87AT+P<5CB7fr>*^rdgReNW54}+uL5ItXI)4+lt*A^4;Re zNaHDQPAlPV<~zSx8r-q9emayJ5IX@b`NXSfNALg$@C%-F``mPE#Hb(su*jhf@-#oh zU8*FZsiuw=i2?D!Iip2&Yzgg->&`kI9+B&CZSt;gt)w$dqlgm&{x4b67IPKZHYC4@ z^Pqmr?}nh)VXp+F?uuFV`O?*49*5|lt>iy-LQkn<39Ke)S^&_2lo)YXoln4=<9+OTs#ENXKl2=%N)LHUNj&S(QHDyW@w7y(3s-gSr!t#j>-pmk^mK24a# z2FJS2m9Bb|p8#)F(g!F;1r=pV`FlqgT7P?W1WY64qQM@oLv@{Z-0wJPV_|q@u@Fer z5c1goZh1l!+QCbzX=<1sgV}j#_gq8Mq|>TCQ9KQs)mSR8?>gi`cT&0Q0?Il+!1rd7 zq$unm){IBz8n{TsX^=Xk-MH8ml)S5V;9Lm8LK@RpNjC(uMU66+Rq7%n0V7K8Xv3vt 
z5Rp7bI&f+)^T2@=Bq}i^9Y#P-zkU+6^6yCx!q6DnsE$Je4)CNiW%i?;-k_~DAh9767K<{8#m+yI)^nv-PIVqX7 z_O47}DoTO{0w9cy(` z*@lO}3+s%(6rhg%SB)R_EF0aGsiUzUMzlLlsOV0s0HsFoO*33PAqYt&uO(rF1d{lG zEw;|C5i-PY;RX>HNZ_}tt_KtP0#>$;+mV_*#DZ%%90qj_5yu1Cx1halAdp7K7|u7f zG09+Ufa)f><*sw7nkw+&LqNQ!aHrWxEBJBhBXkyDae>;4-8SMLz%3tA^4==tVu64A|c;ph(GMg zbOHN9Qy?yNSbW|QkLGM5CL(pAk98l8u~_Y6NNl1bs-{Z=>)Q1Q8gTAn#N{J1I-WEp z6+J!%zY!q8cXGbMq}ph{boWGHsswbsx<8d+<4IvYdPaoC*3sP@>6-9+W6i zhwA~%G91XDk$zF+bW&lYqT|i1V&sL>D|f(m)YH&0{)HgYS;VxdYf7X+US*!L@w9r# z1=tbWEK-LIq)t6dPH!TY#*1j+U64*jIcvsrin=)>VFTEYJG;?EuLmPC+<{L^I<%`@ ze0=z@sS!2_f%ulr3~?*|-AWT;sg;ACJ`8%~;*d3OU7iIE07dAbi4Cp@?@*)oQXQ`d zduUU7;o09|B_Pxz3BWyV5zm5mbKofI#7_)6)uW9gSX%qe0y}NlzC6qoH#%zKiglJ@ zuOmxJ|27>^0uB%nz|1?+)OGG(v%mLzzz!P0l=t+323!=*B?c;L`DGO60!MH?CLILV zx;CgHyut0SKt|vwyi(TK_b(C#uRA33e=2zPOC~ZT#;~3qeC!)Lx`XXz{cd4R6Y6!q6 zg6<#9mR&DlP&{QX%k3!zeRgO01yFu8VWlSbb<@EsPP?r zDXTfXX!FK5mTzYPIr~keE}`z4)8MFm{v5DOI)Qp}M46SAyGazfzp|YdHz*;b@3o@y5<%`|SrmP86b(R2d(lZVn*r!S~$v%1>?KCwjtD%37 zgc!QP;2&ie(aBnLLKEj8)LMuH9T*CFszy&o(DtKiQns}LADde1o$Mwsr_#EyQD7qe z6*8XoP}qA?=Nc87&ORj_f!29%X)ew2pKctuH4TDP;7e!cIy7E<#l7UI!(Q}C6}7)R zPe`Gn$NEg7=15>ebYMRDCn8r5LUE?}G}Mr0-okLBsnG-1{boyMIR9?^E& zO5KOvwM#91DqvIa5uggN=Mru++eE z$<&Cah~tW;lRvlavBG$pv8GmJ|D5cG(Y(I2NOhQY3@NDCfCd8=^57w%Lfu zPVEE;J_FZ!eZEXNPJWSIt!wc^po{K#c~jSY5w=zlUqQ*@;uZLUxN-p}Z{azOVT z5Lxx|^Z(=Y5y)-<>04m`02nHg_gag4hyDEKu>j%U9zVffzOZzl2iG?700006VoOIv z00000008+zyMF)x010qNS#tmY4#NNd4#NS*Z>VGd000McNliru`ErY=fRtyFMHk*yKv^3Jv(nv{3AuTNpyWLJsPEPZGwOVb$_l+#e_K-#j1VtEnCedh=J$v@BapOj| zZQDj!Ss7clY+>`}%|}i0&cbfDGkEY|1`HTLNl6L)`t_r_l9EDc zX(gP)v8s;6#$JSl4Q_g)~hjT z)tGc@j2a1(PK`zq(W+HwBo!J-L=r^=K&w_A@g2c%l&CBd3`dB{;17lI1ta)^VZ6aG zzF-)aKZL^@#Ni9#@`Z>VXQ-8ymNH?&1V)Y=$*57I=-00wDwV23`Y9X^vvuoMKK|$< z<}X-4YHBL=)zt*uPK-J|M!gP|D7NamL*X!iPzYZjh(8#_nwE~$X2a=l;PrYref)UF zj2S~|X(@KQoz&D+1VK=QL81NFwrv~BmMvr1vSln@yqKDrnxo!h)~QLi=t;Nev6-|a z8?_`Gv>3Ef=Rcig8J90az1vT%%TL|mKewN7Bzn{`<-GIGYTm^MkmtTH4Lxv1d zgh8R@I2;bX{`zZ{ELp;P@4ZJwMa2>SuGdK9Bpb-FnaH&2$*}0rYovs~uBa?i{Fk3Ri0mtA(5A`A-gQBzaHqD6~XxNsq#eDcW=|7O%m^iH#omtrC(*+7a(*JG~R z3@$2jBx%19D*}8c%veS_;f^dW| znyozk#1jl2JXjG1h2v9OTg&_Jzt8;n^ZD$v&zfIXi2^yvM)K1w6r`C+x9CrqKB;#2 zD6e)V$JbD#Fl( zl$Vz?d-iPp{qKJ_I}b%cpfJNizw9Im)2(PEQR%1?SQ48m8rX865pOWuywu3e%_Zb@ zlb@d6hDjSC5DF3shY3f*9BY=SL=laoMyJ)Z%86O;aB|MYm+;_&4<=mSD#FmoL?RKE zELp;{&pvyE-yqGRXHc$<((L3e*){0SL}i(>DhF$J*Rj9B-Tb;%qoz-8E?FtI_$&h% zd;vecK#)i@(xNtK)DmWcu|->*2O1qb^!VdkaKQzNFr2EmTrOUH^;KrhoXM_TyJCY$ z66v2~V{l#y8CJd0N$o(5)5n@Ub!l*9#6h+u@$9p++GRUd5r%F?eSJO8J@*`sKmK@gz)`YM%dp<549vBml@z~0N8kyB zSh=f~wR`Fa9To_+L}{;Hq}!64-xrQVaC$w2!;yCRT}h?F7Dtq0BpRj47huhrHE1*% zMHo&Ms;a7Z`st_n$3Onj{90a$iP44WOz6|w!E5kd+UfqV?*9-Fp!^>fhe}GgIA}~Vlf&{_#US$@X#ZVFnRK1MHqTAUayzu zo_mh_@4p|H%Y`67NmdeLiZV#EDE@;)p~)NIt8JBRJJ=MP0D?fKEt!lTEic-NT9QaU z$-%IY(agMg^T^E1RD_`?5{X23?X}nV^Pm6RyyaMsZee0c)+rT))ZN(M;AY9j3hEBU zLTDt3f}AXL?Xce0s?}Ie5LD&%`7meO`Q(#NltWZKksp5ef#3f2w=7@29DwX30~7mY zlb2#rI;R^Dm1WlKsbkspN`m2-gEAvIiLBJNMIjn>I*j_`KNOq@+n@&u{&@f_dJSikX45w-N$H$!h|?G3Rgruaz9WP2GO$(4^nO9zUh2Q<|ceq?GBvD{gVFsrarK46U5sKXv zi(bQJrxo(Uo?5=#T1hYzV(XrLnh_6cmf01?<_g zhnsG?iN%W-1F@ZkGyB^~GH8^}=|MEO1AMq{FO^MR0Fq25dS_+CIhzD*W;4(FSFfBoxU{N^{miLD>C5@+?dQcJPg@!R{H8VyRaN+RYiYS~y6lYqw>hxl4CM{7}rhNYa8l6Yy(KPAxClm#M z>ZT?}pLSZe5Cz3zI0inSkK1p*omsPHH4l#S2IVS(a0=rOMR<4hZuZs3PHtw}lF3Z5 zAqoO!gYl%9q5Q!hL7jmQKKOvNv@}H+x*I!p?qvG(>1^1r0ZAn?u_TKT`DseWC=iup zKHqeJwR`ITNKZ~;P*EY`NdlB)Stg(}aL>K>CT#e#vXnRhix)3u=+L2T*sy_AvyN-V z_fbSa;lC3_5STL1&bU6Y5ayc3CN}Qeb<)qd1(CC-OkvWbN!{yuPx&W7G|B@HJixi< zo{P)nq9i+stH$-|QOQ*m+KSsyDecupOvM= 
zG4Oi5TyxDeELgArQ4knklErC->B{I=I4SG))w5*7e&j>TiXp{CNBnz@!->79m{(tW zjpXFyZg+jBtT0qpS2JzeG?p)4j$WhUqT%`EDrX!NTENEr4Sc+Q9{^dYDGV&g2cXXB zBCS_%=FXj)aM_;~VK@QXwr%6=v(KiYqJkuYmKmc8u$i>VNLOevYxdOf*``?WpS;X; zQj?QO&bIT$8*g-*9SFt80DSY!H;fuJii(N~a&0EAKD1A*pwJqI^-g7c-z)(3Rn?#} zS(rO_ZnqW%IK>W8Em^XJbIv&jfYO{~&L5nIN>p;JDYT8T#TocRVOH&`W!tuGcs!nN ze@&;1#jtSU!e&tz+AEd+8PZD;1%-BFa=#pkGp)E>E~ZYMN>fvlA`FSnoH=v2 zC_zh|NjMT^U%i_~Z-8JZicX^<&8#OU+0gCw_T$0n3-RhV+wla#JoL~*+;PVpiZCP^ z^XARt$}6t~;Oqf*hV@QWW@;zl4utrwyqZn>8=C7n{1}5qVnjh2Bl6RFT#}pp4Q}4} zeh0D)UwrXJw~Ko0(ZaA`!2&M5^isuA=mK`uI9ar2Z*v`o6Om%lanXq0JucGpyYecQ zZLcCdJ)MmkHtf~Pd;fDg-QLg6;bF!%BvhKT(vtcQ2?lS`T#A$cjyC*c%2hp)F)9@C!? z4>e978!H-nRDT$?5@!#v1Mu75{+5c03Pl(?o0^)M<`a^|nO3F@v@0{U6Y&JXY^!YB z-u=2mxej^~{j!tjb11&yaNbZw7&-~R-_P{v)7iCa7j~PG3x@XUky~Mj%+4BTn`^^X zIlTCS;U3pFQwHXuQ>$3CXc6zf|Gpv&oy9G;+`_ltev8$h;i3`wNJ@=|PN&iBZ}WG^ zGBoutC*s5rIqxt7Q|H%t4ynv(IR*H~@g_KETfh9f<) ze@5hIVAN@-sHkA(%$bTXbR^$?`)%{O?SF>kk!dw3Gqj7)tJ_DAjaqe&>l*=q(+3aW z_KRaVAMd>L&V+YFc3WYnsi|T5^yxtCbemE)vTI3ehsxd>NhI0W^Nzp|Eb2{GnvILb z3?L`P3c!O8KBx#odlQXDx$e5_sHv$TH^s#1%C=;eQ;=qE^J&$>bW4vr@tR??F`%%Z z`Trlh`dk2>fBt!P?AW0QLwhoF=1e~N=pzgoN!fu&5VUF)gY#OS#!L`kbYVu1>JyV* z$M_)wjyTCXtk6zjW=g_a4c$%{R;^mapZ@eG1OcWF>xD_DR%T^_Fs@H#tMA9~-f3i5 z^*yF9)RM%6VS|rZjOMV_FlWvj%FD|YVQ4EppO0Vs;uk>d>C!vZtjxv)!l043ctk!L zN!8*d#Nm|eJploMv4cuUHXl2=aA{s9MOm?vzE3{+q#_J$<(_-)p{%ToRI`q8eKM4p zn1Ey^8MylNVp7c~KP@Q;0;3AjnLaZAq!pdI6GQtJVoyKu!M(r!{7fLW-CkQ;s|Z7D zS-yNZk3RY+q9E|IA$drm62z3Kq?mMEJE1S<4$37b*?3g8EWJizfZfJ*6Z$f_G`mNM zLP1Uz{qlRA^nD|W?4(;`7R5_1y_9%XPj|@2;Pd$yGGqv4Wo3--o5|Q>Wf!755eP?c z`a%T55eyoMB!i|$I*c<@Y)l?87*UAtp*4TmdLDS|a|#LyC@U*NrBWr#wRDTe?h{Wu zL0MTD=@vbw6)8)I?uJ%Uk!IGBlWZi}sO^!WV6&Q;Fmzztq5yE|_`z5VT6XQ)m2hV$ z5?&Z~?%c_pcixF0z?6aZ9%Xha^bE`fJ(GtIK`pgB`g216Sd7D*Idc?YXbFG#!yf<` zl$%0MvQZg@3JHZ?r{RndL(yq8t@`}$E*KBMqD70StgKXoAs#;b@Iw|YT7+IBF|i~| z8HEZ7g+`K?JbWl7L+j%&GLy|@B*nJf-+AX9MHo&-C=}w4fBYi=C0uw~e;`)sKoN$M@X9N%P*zrk&7^HpsI7uRdn2h-Od2+Xlq74reBTXI zMk5FUYu2n`&z?PsFdPq$$HU+L_BQ|~m1HXpM}>qzrBX3z_+Zj)?RT12r;*4?jx8q^ zFJ7z&!||9kYZf&%HP~%NiZiXsXjDiHQ4mEY4^5a5LLotx55otMVY79}_aCTtaNgaq2uO#+fyH9!+UrfU8}QzFB4NaE5Yg{Oh=dR#VLtkNB~QJ#jB(?}as6Lf zmc%6*jo^1V@HrZA*H=-oei@QhPm;Zul-xpe=9c;D62@W(hr{IO=TlKp!6l;#C``8` z)FAN$!tAMY;qV3tN23@u5?M(GvXYKIJ*d$AF&hn>IbtYAeanmO%94hNR!>x|2bCJa z0aT#?LMVU`4j>$7%GEo)O#bUXW8=-^!C+`xa$}#fk*2Es)a}@S+F&NVa3IM!z2jPP zbh^b5iA319ZyyH^9ANL>y;N6MBS{iFckZO3q5_*qn^2-q?eOvS_DXiw9=o85S*PZ- zq6`M-rS!Obg{LZM$yO!~8;n*RXErK9B%;wFtMx=QI)rEhG2}<`G$Mur5a**e#bQ9O zQR54SSi5#DEMsGT0}>CNzf*o^0H;8;GU&N)%S?tl_I|2TxdG$>k66*`^A%9B5?P zh~AjAN~*c;Om2D_=X+-E9=>~ zabuf>;lIKqU{1@#oR*2#(ZGSttEt((mb?*@FeayUzA!|iQC6;8$;_EEsjsV}X73)- zlB~#04Www*q#Vj}Q{B)=G#W)KsTg3lCBy((w!Mn)%B$jj|A9siZ+^d%tHu?hRjZU4 z)(t7?mCLYx#RweV@{`qC!deqitsWs9L<)FOc^VL+(ROJ=pX@YtR@Sp&!-fvIMuXKx z@z^Oisw>#_{bG{rMdS<^i6|a9W;-mvY3tUlTy@n|{Q36V*|%jAlE+1MYRWN>rtUm& z5P$)>HZ)S#$6W2KakfYln%9VKKc8&augtJ+hJe7Zl46GU>vKX;0Enn{h~XfbhDtP! 
znwayjokRg(SW&E$-P*NlJM@~Z8Fos~xr|6CNZA+f6Y#n_Rv6rFHxE7Z5WoKQui3t4 zHM)QgtMT|=-EcID+vfvdP;S>-3$iTp#g>Du{`~gJCMp^{%53WhYh4(@4UF) zFL@oP{4NB!V@l~w99RUzQW-`f9b2wXrIP49d;)33gV?@wA>O8jjuD0}TedK5+BDvs zKOc?PO-dZmTe~U_0FYtTlWyty^x=E!+|;+RK|`?%_=+t0@ki9~q)_1AgnrI&~_HDGJOHp{{Km`V*y z5Z}U%8fTloV^6J1nRN+Hx-E$bLk1nQ?nB3|Sp;NJBC3)=l#o>-vLJyd0s@EvG6Hk| zaXX=Kgq`1gir=1#TBAp8G@&t=(CAI*%vQ8|Bkj3QZcfXj`1A`X|KaTE7=n3q--|4-gE65CwtKoaBTUFpZu-+m};;5aCD^Nu^}6PDF}wa~RpL zZ}XG6LzbkXim0RoNlo-{grg`SM#3P65h78Dh7luv1UU+_3{e>YnOSq+qrv6n_B-xm zM9EOX!2m&@hk)COtEK{PeKjG!8)Iq?rnGFV8Fmawscp`sX-rC?@U-*Uz2dXB2}5;t zHP>EyEwvRD*!0>%$`?Po52do|m1;_uL@e!9hCQeVL&71cRE+9hLVkApXBn4O5>d65 zsH8(yYe5iW!A#*GV$h4|jYT)c6V%{xI@!9rk_M*>Q_8WEA4ftVe9i`3bye(N`z?M~ zJ;}L!Ny+bvDY^AaBXep7{m;I%^}?`!|9-B%`f6O&Riv~si%ngV10bP}R_ofKoV!XC z+N~_SLOYqtMgtRu3?#|iPNDXKKvb>B}M&DtitrLbFJ9co*HP;ZR zuOp=uhfAW-DE?rGKq!PDB$hB_SoQ3vcC>B%YSAlU(g{RvMml2#^h4c7v6e-Vs76mz zV?b8x5W)e(kRP?P79kR9xAxjBvE_JUV`Kc*kXA=peqYk^`{HS;r)v9JDmO1DtN&@F z@E4j6MsWvyqC8E2!PMiC)7eVzAmeMi_SO+Qkhw+yHLp31bru z;|~V$`U8X`;iLY|;qd}ckYI6DaiknXO}`kwjz17Ch%h8M##`|RGkd-mLOE}e7&)1=1*ugNP3Mhg=eM@DtbfQH=DrGOsWT?m{?RH4p}A zJawKnMA-~0PLVz5j`tK#brao5)z_S!9}u1Vv}3a(d*Rs3m0!7|p?)KL4Q!v5fc#k; zub|zvwr0OZXVruiD+F(1O}*6R8=o+-#BJo*zOsB*X|PdK-R~YG-S0>#Rx~0(>Ukmh zaW5iszvH7kqRv1a@ANRB#*BXF|L94O#NZ2XBrdy^88vR79fy!>7m%o|bcE)rV$DJr>ME{PMS2ASJaZx!$RkiYIA`LoLl`l?JD_cb_zlu^4 zyO_#UG)Gz7{MetyN5FGR(oFOFd}J}kP>Z#|HNkAsugOjctOXYg3mR!OW2+@q3>6iq z)HmYTDr7eXFbSa_e#b!!h9|WSBK$_UAx8@MVa|Vz$^$^f)to71bLmP}`eNlWj>onU3#S?!QvJdPqmWv%L-bS?OmHa zA3c&7qgl50;~HUHVF(%uMc4$7(@o7%K zSCphzTbf(1sC-|-WjHk0n>7on?B>~_rDmJn9oJ*|{G!mJTzNzVlTfMuJJ8~C@@Av@` z7&sr>UXvB}yaW7bc%QtPtzb(uIgYBwQFGG_rW#6qfr_8!-9~t#FYzzTl$#q^sE{5e z7elV^#xJ3+(fWw^ClK63u8qyZD`s6?t=$8p?f4bJ<7B2K3wSGCB$Bx`(=~MT6_M6g zgVsU~#gYiHXdfUc1E7s5Cl?_Gwx<3@F+Kb1>WJB}7L0KuCTU1>^5a{$!8W(I)OpMX z4ASB&YKw}fetfheBjGyCRix>9n5{+No-^ScYANQKbgr#9Ob8&G{;FZ_B+$4K7=1je zmyJYDm|EcyKGi{1R4by zr3O(}zpSdhRC*ND&+ahIhA%NOX(HE5R$3`}T+m&WiN_83X`1Gbg9B6F1CqL{BAdTl2;nu;@)iP46a^?6b6@fsQ$7MZ8MheD+IJquGMmr+=4T^`GL zthnLM`rIbu&tC8b6e&v462=Lnk>SO3Ocbj%ylWoOo(`A7*u|z4-csH{v@@?w9JupLi#(%0f1Et2P&ROO>v3T|Q#Rt~s$1+-RtP-r6MJFc1>oJWQ%=+Fl1K_<2=9)(Y=_SO%288^MTmcq@1Tyiw)qLMN>V{-se{^7qev|}hC$j| zn6m^!Le_hS-EK-#@|HyfrXVzhs2tXCFo8^OF>Z_b2I_EE$8>-G16R!Z9Hjj05S$8mL%g)yzr@^yqfpE$VIUl!S|4)A0;U zd(Ed*a2eI0Wun-snp;rw`r z`f=;vjw)22$@%1qU}?->snW5zs;&wuh42%UGW>wzo}zTBJ9F%_LCMeE;FW+uFwF+*1$+3KDFK{$2T?1e~u{sg52e znTo$sgQ;QYtAe5PRnWH5d0L+tzM7J~K7d$5o7Z%W-ADApnl72pMJ0w`KxnL(*Az_@u-TwZ8%xGWOK+s?++DD zZ}!L>B&*wjYLm9z1ZG&W7B0l+I&x&9P!Oq}d8tNTIMz9$sQgg8zD9!G2%XAT5xHcd zXsm~-nr_z4pGJ{~Cyb`P7`ra!d8L`1XB{sKI3U`;{(iwz!SXyiaKT^Usg+y2xh~BJ zFd9ywL;S6gOOGrgBNLt@45|g+U_ZYPlaLGB=~5!W&s3(wnWe6PVU&dKXl$UZvv9Yb z+o?ID8TCE2S$*}8HqjvG~ycIPJ1%3bi%gBeNsdUO5X4Qc?OghL^isiC27 zk7YGA?{^mF^%FHqb0_DI4Fgp>P&H1-N^zAnov^;R^E`Mq`|0#q56NIk&f*ClQd4-3N6_3uL_M1*7XXk|pQKU8%Z-1gr)u+WUdTF?R zT9aDiq7mf7u|THZe5oAot2)(|GGHY%_fQ+n`o127q^Mv7gdwm$3pA05f@Uo#X(&*9 zv}N0M;1nGgS2Fq%GCCuqIftbxTP^}f{N{7a?mb^ZbjlRgFCsr+7Ot=`>C335(J?WV z1iZeKm9hd&7V!mJ2x3@~WlFEAUAq@Oc4mvI^VFvyS-E{Qcw^2t=|T?#OZ3(dfAiD3 z4_oyTS&b&a?b)Mq?N{ZimYULHf~B!dv2(aA>pngi5&X|}y=RsC$H2sHv! zFdT|;Zy3r<`}0cSq8m9Gf?=cPpkiZUYLcvVQgd%Q!i4{~DlIGs?$gTMgG`5u+E)*@ zQfAIxh66${pOULD?$ELJYmw<{{3u;rj!+zmBst1*U!UC+RC)T?OZA+GsLEhvF6yHDx9gLBV0}NYG{U2KgnwZd?+W6DgC?v-@Jkj3raoGn0Zh5X?mQl6)CW@wtyGOnbo$c0OWz zSpl_!Oc5J#$2A#)-AW`bNvF{X;MyP$8+kjpQm&8R#+)&vVWD}$?9l3%lOn1uZp zD#XX%sr0y6#1^tLl<34ndKuKfuD2}Ne~|qN!;`yO7!!jGt|oxlfl*1cp4q&4X!CQY zQNdz8xYCq|8wHBgV8}+5_*ql2+T*;6@wua_pRbtHdLJWc{DMv^?TiiVF!O5}PA6}! 
zWtGey{(P1vQp-vZ4%s>fQ_lI#q_3#9^Wh-G)+Ake)@v_!AeyCfa88ydUXLfzQnmm1 za>KFR>Hg`?*I)1jLcfayNS0V8ubtj%gEv!*Rw?WIGHoHk%JZX8=i)SqM)!T8py?Akb$!<{mAs#t7gHKON3_sX z8^u7Ji}Vv*WHELr&3ZJEAvnxO*N15GO*dNKVHruY=6T*$7Kw}2s<`y>6)8Pv6)E=q zT-8E8$}Mr-R{CZ#J^=AgF@nhS#xX`#HMei3;*$w|TbV3A8{)4X2SDyPot-`btJ^-=udsdnE6wIVk@H!GPGH_z z|HYO{!JfptF%t-QI&_-8%T0{y``_+{;{G57JwN23JF$itd;)WC)O+LN=jXj$v`%94j< z|KcE;W|h}G@MDt91m?Vw#8U5ptSj}QJIQ+chq<$^*XPraa@^lcA&~3;gdT>EpS20| z;&`3t(0&sjP?@1|myt(Du|ia>xCK_`Wq6Dp4N-Tb1-$GUUvSvUN;Q8L`EdpbF|X%A zRF?1W-D>TVj?@eGNjwpMK~S0)EWo?C`3;>IF)q%^ms~y`GrT7@tr_YVl|GQXquF-x zE>GWabW{qiI_vfE+Vx@2jFzJvMeuBQZ>IeRSSDisyAAvKmk{Y?kHhL#s5(zO{KOU< zeJpWuI0i@^Zk1-#=61;DA_BJc9o5<>3WoquT4z_gw~hKERoSH+#iTl! z3wDM}FV;A|?T~@E`ktS!uCDAoQ1oyxo=pImc7J~lV%~8CcI&(UaJjl}Mxv z-N3^k)*X2P?C9HZKA)GH*ls}lp!kp((f|HeN(dS%nO7DXITMR1<}6+W9^+t8#PFW@ zl@FTt&!nQcjoo~bfC+&$J3;#?Rnb5OYCu3wQ(yfFmdhQ&edq2xf^-UbcG7;z4sHMv zc#FYOBz`F(ulExft^}PvPrH~Ij~GTK>(GKCc=o}8VX9lz^7IX#H`d-X?9&=ioOu~DM`CP?_9s0yhO{T%sk))HU}@i0RI z6k3*6^ghPACusd}>ws!e_mGYz;hzGS$Br=PXv=SR+d*>1IDDP6r~ z_pu=WXv+*J-0_g%zzZgq<#|VmJZ-$iYqIY2J2F=d-$XTSj!PH{18kiafvEigCO@Ff z@RAcHu<2`|)qXBTtztx2BZr{iB#2({p$F$NFC8I-QTT#hrw##)pLdpS^RAx3ws(I? zmCV<{Y4DGf61q$~8~1o;0@Y#n&hPQ&_+~!Eu?Gm)_m^5(zyYf(=#J)Lz!zHAEh}y# zer#`9Kg1GEj_!w{QY}o4;IA~o6!JlU7`+3Z3`_h%0A1`eGiLB<>xt3OB75hbZ63XsE>|Q^GI2+SIUQDi>|TnjFip*>TYzhb*|xXsGsTJc8!LXL9^zOxD?nV`eT-CBU6^(ZZ~aGK zWdz&Vm+L0c<-iLmU{K&tIA=l=`>dfy7t%;02v5Qd#Evn7$QuR)u z2zZ`8Dh$3)QjtnhiPtZp#*5=a==e-Jt6*=PrK`&Gy~4#?`&ozT#+8{!B;!)*O2FQV zFSG?SPBQallHo3h3`n7Rm%qFCq|j34^r2mO(Hh*Kr&a0yd5qtV>v~C*BKCsG)!sam zdLiMDbroYEGi@LvrTBh`uDF>gvWQziZY)nuZ6!84qg%Sd&^_SR2!T3X4?{`i!KH8wx z=D*AL%dUQ`+`@$vCFNm?1YE%7+LtHBQSWMYj`oD`J(iu=T9Z)Ae^N#JZyRC6ejR%B z$Tf@f?o6*K!k8@L4t)O1fLts1@ziq#%s&rh6M$+2xL?(xty5D}g z>eDVE^WB~a8NRiKY~7wbU>n~lpnqCb>7IvV(&C8|c`k#cXT$LyXl}7x2pY3pZ+vtR ze@+&sXaMyu1&Iq@v}b#>CWf38ODzBU?RR5Tm|eW_tgvuK_W%r_q^0tg(i~M~nN%Bena@tidL=JE9T?Bm@OP{p0nvhh()nQE@?w3I)4+?S@B`=tqvYuWtmeQ~$u{3C2GCsW;JT zbEV6y70bY#y>1JG~l@eL$usc{;8W) zW8%qSZ2X@!fF|D;BUf=q52%ySuDt&|-21}49MOxb`I2F_YXn4gXUR%BJuPLnt20YX z9V{%Y@5OK!3lM`8J$-)8+MP6kg$*2g{CYRb;jncxaA7-|CzNed^IdX3V_a8OHQj-- zXf5KKM7N$naC>P`Q;)yXtK@Sh5sVP3Gkl!+#r9B@cJrIY1$ndJG8-Kc@xjT`v7rQ<674sG<%ahxz|O8Ms#h2on7%v+0_S{=#NGnoiuUqm@*VjAqj zv_z2uMi@4CIts~{bwCXF+P7|9z-HX66RM{ie>%CMlMh}R3B8@z-I-7)8)3&a@MQx^ zBa^$c4cR@{7FJ&RTazK?_fx+-bpNQhK)^uSgb8($B$&C^KE1suz?>wAkMSjBHH2W$ zs4?9hObqEkxA(oV+x7ph{(Y&8^whsKvhno>s=e}E;Ge{OSR|Z;q@+SuMGb@xy`AhI zwGEk6?esplzurYJ%$=@(g67gd;E`%l*2s#LvZ~K&5NEs z+{2BZ*gElwMwB2&Cg6-1%jR+EMd2Yu2$~YrBSZde38Qf>JX#B;+>fbb}!8IaPk zA2O1n91(RrQukKa39F>ra2=?%*Iyn_;SJrqU@cysVNquUQUun5y`H1FCJ8V&Qlel#d~IKUq6p-rmY+SO?Y&*PAj|rlPCFa+P7n`Z#4w@ zano78#01$ZvJ&pjrQmA`W&Dih^Be{Z`Wi?&-^*J_V z^_or`R~bTV_spjI&h~bl>;6Y#@4sXa+{cy17euey!PwcUiIWOm$I&Esw`F^GzIyl# z$hdm0=pW-X8C!a$LDsXa#2-X^kS}<(PYrl21yAQxk=_HJ7lu0N3BJx=7 z29z5frzKjqV@Oxm4c(_xpUmK>(}Jb5ozmQ#=UZG)+l_QUu>n@+a&KY}sB+0f4CofK z6F{QzDj#lErvCl98qUYmDMq(i`X}v^9J>fn{AXzR2IIb{-BCd>gM6=f0heb%uU{y= zWOe=jI0asw_(!e@y%NtV)K4cU37*3MH6ibgk~nn}>)>zNL}j#8ECbqx)`B-3e4S=e zy`OW7agZ%rTMk_L!4*T8+hKLISSC4QnBBp*InJqdVzXl>JLPwxa*%Sj#M{x0#j;h~ zRkgOQIDtQn7H?Zuy&1Htlk`OjLdghXzkD3IY_p$r0;5O6Yd&9ilFP%hYcKqF5p;$Ql9<035qGK z$DbFT>WakhnQ{l89Ohi+)?WJ@@&nSN+A=~Nrxkcq^!3MqhL7dQy?siCNViCdV4klzjoFgHFgK}s&M}L&yWC2HL>~~-gLP$dZ7hWoJVV+?FIBT?@NXo zP!Qgm=Z~+XuYPx-0d79t36WRdOB*X33XOcepi1ia_o^R-n=-M@9y6{tT5bR6E!f(X zmX)~xw~NbGEO2Dx)woVoP$3IA@ysOfc#g`fwfkJBQ|@6Uq@>PM*;SX9|Hs|{FTq%? 
z6(zO>GV`v|G)>~1BCM?vHXyA8FE@A?^)%v^Ue}C&mF2nNyd_d znCUD(b0##-&kyXRax~bo$8^1RQgs2-6GI4iobpe4ctDM}VF-~s*{7GuD;=XjhggJ1=LgkQ zt18_ui`3*ILtboPlk$JL%y;RA#icd=g7D$7;dw*?D^BR0%(M{u{cyu*o7>?(X9*H6 zvnk;JrJmYp5sEdD0dH)g+qIe_P!%#`zy7nz*6|`AML<)pB_Euk6>i@mUrZ57KuBza z;K_Z`i#RSPQJyMD7ryqW`@i_`=wokdljd&O9-rYNZua|Z9uxA0jF1Cyv^ zfVFt8uEBz07y<&>B_(9coPvr97C;eg zZ33W>Ff}zzNKKUh4r)-ciZTGKHF!7{mVbkHAN zdCSMclMd#L4V=LL6b1i0$pHTkfVKt^#1w00N>nFe@DqZU$Shq?>>q<>BxzY#%9Qej zrGB*d-aAXi6IaK0hpGcvzNmx*Y^8cN|G`?#x6L>H!05?gzx)kV$$e>dOW>sBmrZR) ze-t)DN@9^CI5t)V`{PL_{*~Ok?>f!ZRPfH$4 z4oj$I+-H~w5Xj5NX*S`pa@ODOUL2(~YqVi4H#U%D-S!m^f=5MU=H zGSM5dI$?#A4tBCg;&E$*fBx)G!Xp27KWqWr*DvX%T;NvyI;W;nt@Xg&!-@{@WCsTa zlM4%xc1tZrd*iu=jSF>+jlpqosC_p>$uzv~M|gG}K%|4EWnw}R6NB^O;@E1x(o^xN z?3bCJpP#F%D+B~RJ$*uMZfs#8IYOw|(MmfmF)=ZCNcQdQ(eN2Jw1|j^pwErGE-EUj zq_Z>ov0GeR98NkB5z!tY7U*+vadDB5pC5m4VEt!tv1fW3lbM-$a%!sQ`SBJV14FC< zpM)g+p5r#^qJ$pg3!=x#ayQ`pX~(;Vh9a-1?Lk9|e35`c5ci<-MtOC$1UU1wwYBTa zMsV=ny$kL${yb*h+tU-&U|uHws@`HZG%hJ7Dhdf1=$gap6r?&nG(@&QNkj9=*8lZI zVX~mCEIcA2;>%w-;bZdb}(9(l)8*=4!&yhFehqSab>1;u5 ztx|+IIXM-T*|({7Od_q3wkYW-r`bu095~V2@SJ&cM1|}vdWE2#uiN1k> z@V_S~rN<;&TU*vUazyidjEphHm*-njNvPqyiQgTM}brxDIK9nkxv$CSi zfuZoipYC$H^7&lg zz|80?ucm0}IapfOaI2;@I0$NQ7m(mybeYo|Xv?Emjln*3xyvf(|DH}A6gsiMu=o*=I1ei zusb9KHd8ua$@IR~ijDKUCt!?Dg`O}OMRarrHLAP};sm^R)H z%#LLj7Kr=!2uQT}KYK5}3kR=7rH^K5Oh6A8FDu?3~UkRpVagH{JyT( z0tqBv)2rNK#ATUh@{KNpo)kDWxT;S8^1z8~@;k$Q5C}wHwSRH>4cI}*5|xsYB7_1xeKPM7S_3K@>_UlpFLp-I2?*%ZV=F4?LYM36 zSOGXN^z_67ZfHvzn-8w8oM!x3goN@HN^)}1H(~gMgsR@!Eg4^|S1C<4H#UqwmlZ=t zM>I@KOu!cxbs$pKgZ?{rkB`Zw1qB75u!KiM6e>+Arl5c_o-5G0%}av-1r$;W3JTIn zO5q(HLS}Go_ZMcKo}S8c5|m}PeSLjs2rwBrIpQ`p49t3MAv_MN(n9cFkFE4ed1Ruc zf^ahON=ld;8yhYs$I~U^9v&Xa&Sa*1#Dbph!7rv;PLGd;YOA}tUW2yTAV{O4qf1Kf zram*nx?5UG+x=*6ZIx>Vkw{ipR8&$b&cn_9(V2!a{^!s4n-ABAZ%V`&Krq*GK#;Ss zC4zCJ@Oug7sp^2^YcVG_F%iAw0SqIJAzl;%gf_yPH%UGv_n`d*d#v>0;v$G*aj-;o zi#1_kVPg1ha_C>T$A12V(9+U!Sp>a$Hb9}3U5L`c)HI~0N9^z6;q=SN)>bfp=aVPu zPeUuP5*vqy(E!Jgfu&0$VPIeY0-kXz@It`QZa>&`%x5)$tE$)Z8n+S)1`7{ot4`KF|# z1P*MCWwyTZ@WQK|yRZL*1j;#R01Rmv8GqFpbgds9A+xZs9IbZf)U>(+wcwej;P&n= zISb3%=H}+Q=H}4waCluky@HYwSmAu72BXG>=v@UvSHZ*meItN-Sy@?s?v7;}nVQ0Y zFv`ft7|p%~(Eu3`5KyPd2%fYF8c6*;J`N2IhF)G?Uf+6yEqVYy`3h~RaLqkI;%OIZ8MMn&sOkh7iLYtxC<0InY z;$~!Ln;b7TZ_Zxj1y;`YwZrx$|^MD_9U3Hv62N}Xc(1q&G&8Gs$SUr#RFBlh0jb*xzYYu}yt zlkVTByIvS_R|{gMyzz8ud-*SoW4`%!6P~_AFS+MkV1IdiH_cuCp8Jm};pDnKXWTvS zy*uA^_8*h0AB@&N{C~=eJM3Y>Vm z>S0Of6#0x`Qq?AVB_!=2mKSA|%HNe(T;wEVG)`F=d%e}H+eOIWbmUzLl|$avLZhMU9v z@kf`OeBFNI7~wK@k@M}!J$P?Or|r*xjg7cMZ{N8^yK-xka7Rm>4ClWb?RLyt0!WnyRdiUC!Uz^JH4NSJov_Qrhg| z-I4>L2I=y-SggVj@juOTTH5ng`S(0pcE$4x85}U}`I?dHwiQ+@t{N>y^BLvouK2oM z&=A)4x}A3$BXP`W7R(Eq@BPU%Y*VZ!HY%lm1V##c)HPk3izn3ejVC5MPdmUOY&$O; zmAUc;X-ae4ZkLYCb*#?zwe7mkj+<969@Crc)jw{ZIU7=XG!s*4dKLW5qAP{&Mo`AK zio7OA(~?Ao=h*~>A`j@bXvO%Pa~hX|OUizcRvu8E?&6XK?~#{tzWrXePk%|h#;SOe z@l5=nbM9Apa5rI$>-c_fMr2~k3t5|5uYm0(Yh3d3_`U>dG$3XB>i_R!IKYgohV$N9 z9A~>hheyM6b=lQbYes&&RF5>Tw8T1_>IXindpBSEOLpZ3bC>zvd1DF8OhOH=h$KAV>4Sdi=P!|G@5jm#6ewJEc|R%{Y5h^0S7m^c`lFb!9Yj! 
z!HteezzycC*&YPT&!qI5ph5ITo$P{C>Ejv6sw2Ez@IVK=p2o6$m&G*WVOvkxlQZR9 zOF4W5H+^nNZV_v}?qx3HD(@MkllGOG=q>k|hUfO?y>ADB@Y0$VLEM(%9^nofRn*Y2 z3q6s0B*e6c!uYZoyVCYlx^kl34O#W|v9v&k(nQ5kL*R7qnT>2j$6u`IXv zGR6}-e?}9ltwJ{;sp`bxL6n+YNsb0i(P;v%?inP5@qwd<(K&&wT1pE#B59?*s7gM( z)o}q!bxvxGbfZX=RDvgy zqNmzJUg32aaUPUehZd5Ccn>XdhreVH-lXg{DvhS{1yoG=aDrFE{ zsgkM7FK&~?kKa+lhmRjMHwFQSE3F?5Q?>4(`QEs3 zH3AwXovtF+S`7Ff7(s01O_DB~Fhi{#zApcM`2|QGuqH>>C+aMRrh&xUsi2hD1T7dw z65W~NB{%Xa#k4x-$X>LTCHA9j6IHQn^qg*4IImteFfk|~O-s(U<##A|lEwlp8%3CL zZh5(K2mA0mz`6I~DeHnb%9oavbap9?T^Dl7E!`u3xu^_q#n5tR?~MzF(~KfXhb6qb zN!Srkt|h*J+9kpHc&y9$3ENz~L~<}K3g|t~m5>9)9r^`GtSPKtCbeu4>A)#d^oOcjM&HXKj)KILDVu7bjkPuF<&oqco z%$aUTw~|N45TVM$hWG>H7AHLjPCDz&h~EG{?xcSmp84w#Q>O0C>&B%LH0DW1Yu~pb zGCW5O;>oJG)L&O zxw25R7z1gtGS6}s!}!?_;>x;NnE%{@)S#lUO&AK64}#p%elW*IX@)2uWu6jnW|zx8 zzn%cwC@&%eC!wx?zZ%~e zcAodA3GN{kQ>WuO-|MfakJm~w;}-P! zcWU+fT=4|i&rA^u0*AsmAzd3Y9>`*1EI}Yx2KqhCtF|t`(A`BYEHC1SSP|uVU==35 zmW+DT3;K>AAehC4i0XmgNg#x@mrXcoTv-arCnRN-n69hB?gWriPdq7r`L41OR7l>ns1hL5=Ey z8W8QFwt{2KtfZc&1~!wuvV3yh2nl&Q955IB{1X+087k~o-PsaYi1Qk6mnm;IDlVEx zx`mI@>ZAm&oG*mFv=dK}%qyMQT_J_m!%;~^UJsEf2s;TZ(1RFxAqEP`;PSMft|1a7 z()ATdA$o|H?-<6i2cGwIyMydu=JfP*2KoK_&u!w1E!W^`D=0u*)ZlhI18Eb@en8(d zyf3;8)xBW!u~kQXJ((jccB75R9!_T%>LosYI>8@0gJD22dGIksz_@K8@ckb(W*45` z`eDr>-pn#O0)kTu#UU9fsIe%51aEHGVIQjjxvr3Z(>lun%AV1H-7!uT8vuQ~ji9h4 zrZqxCn8Fu_`8Bp;pKCvYN4*0l<}5gqN;*bRBi-I!N$wgl>d1G-F9?FQPO_nY{=T6~R$ zttQo}4pcArS)F%DVQ`0urz4253@EyX%vBLcr9BGLZ#hhQH#kwVpjJ;bP+vN#o{N;kwU;~YeO8L|`tOX|uOyBsw#k84?u zWEdIA)TE|e+EN8c!=eesrL8Wl=l1n7Upicb*(x&iQF4UAyjBV zS6dMZ-0s~`Rw&vRL^>?@5v$Q7!q!jHQd7RwEvS5Ls0s`3NBnhj%{g6-40b@(PJC+pa-&d9=Zq#)8ZF9Xo=wN+?Y;n*=ACJV`X$8G;*K z3}od8%fWFc_A1F7gi68U{RCVjoA61p?D~H8g933TYc+R*1N)`1_Yqj#=Xm_KcSA)O zA?lp>dw!n%bgarlfG3BdC4|y+2=p_7%8V;Q>{Sx6?%>b_@tno(xStoQSSFv1>%NIj zEPJ$@Aq6s;IfG7*jeXnTKa%v^9;4{Q=Flp8)+L3C9dd~nS4kKgD75>sN;d}udoIJM z>tQ%e(z~|4TI<7McPNFFVYmc0F5mB%s&D#pxZEEP#?ye>{6Z7p&{}2b|?t{8#Hqm0Vx0BA0wgQE=kBN9JI#>feFju}rGW8P^- zPN#MlDQdY@%0pRJ1o4&n?S1T#JsC)7%z2Ft_Bn)Zu9eI0!R=^UnoXEz4uX=Z=-ije zKMRE_j*w^I(-N`HV1^)2w6*aKFf9Vn*@9SMHtLCdU0rQ;9E;7jM@7B1~;r*+?r13qoh9?p_r$xaAuy<)wHW&Gn)lK1es7$(BS77iW$^dt4>Z~f4Ypd{HNwKB z$k0B_pK#5@Iqo-^fps&D#;a@ zJ*;I}uu;V9eOfc17SiuIJ!be&AiE$DR8DB8$s>s}aEm@Q_t_%`Bi%6^E`d&{KKMUs z9q>D3=m8p4Tl}_d+~4`Ux)>S1@*4;ytd+R#0wv_;KbD&K5lE~xT6a;{55*th8^JHK zLeeSsEKsij=%$pnG=B6-4hxc6cfU6uWdE+q+@6nah`*ce4>$uK;9K1f;&(1(2cJ?* ztfh#EqLhfp|Ee#anoE{%0>8|FB1Xu+7OP0ruP!J@arr!9P)aCJ*p(VV@cHU>#|BQb z$RtuC3ESJ$ZCGDLv3sH4zzQmVRuaQT5N~f2U0TPZP04{X%y@}#DQsljI8C){Bn6igdAbK8i)KSNiwKCEBQHFL@;=JAcQlSTN!)gt>~C8|PE5W_04) zFFGf8&#ZqOw#1Baaf7?pKJ_tZBN+)%fF0ll zz9Z`SSAO1sbCA?@1^@)~|2|+qMi$QJO&Av`IWd?$Xb6Ze6xaIte*l0OkP;PA^;kLW z_G}|iOMdv!X=8!fkBwpqjp|?&CxnqCBt}XS6xHnSpQWN^UEO`WmWh(!eE#N_L#1%eU+Q+2 zKUt&wuaOdjA`D9s^#3mZ&j z1>)yEpg_eyN}GQy@W>8&0egk+!(-(CpI{26ASvR!zrp}`!I#S4K4Q)vXfh3YYr!0l zK_4)iFo0djH2HrP^XtfiYHb3oM zP;I*J7Si}USt;c*g2oc5jIVdcK3l447Rwp(D11(&osrm)$EzJ?yETvh`8ymvFd9!r zJQLc}({s#68n`!+YJ9{twV7TlQ*X1(`g$5`?XZLH`#3*S|H#(TciapGe+xGC4(D-g zw|UhW@Zty1`wAmlLxGUNA2AL|W&Vd?45eU?M%8V>%0JE!4R%A04I&v@`eJy`!wB4# zz<4jZ(A|~{B~IIRVwNmRv+H(ZcnvabOY&-Kk!gxs1R<5vBVgcYo)$sZLrG|&TCI^L zNPyq~>EC%-2>Fg_0ghr3XaIU$om2Sy(aP_ru$ z_J6I$ zr0r^dBPHNOOT+AQE3`$|ds=LAyx-un{1<6)5KL*v|6;|9tCqYjsZD`>f5&h*;GD?E z05%Xe*4t#b3Re7HujNUneh4yAjOFb8;O!Ax-9^uf(ev$* z$MWZQWpyi(s6nH}+%Fa^|A6_1CcJ_cE-x0%@X2HPJCEZu+5U=VICGtDv{ZpQtsy1K zJV|vv^;+(H&9Z-q);F0a0Tb`obyl9-9gSdFBe9N&}#tPak922Dd3rBC+ATEVb;5j8qHvgVF}p2=9M|24ZldJZ6| ze3VpANr}gN4aa3OGx&IW7@5$2clsaO-WT00K984R2whI-_t;cN!RF&{IuJk9edUYy zbTTy*Q?8N%i|G)$_9N8Vj_OIVFev8#$pVO 
zB=BhPpKUht$w5NzQ`E37>FRm69whallKb{Jk!ign<}dU}tbX>yj4iIOC4LVl515au zMAhQD0)ycL*9Rq!VPwLH`rtmMsF!Q{<6B*XeZjA7=>Hc=4f#R4^i5Uq z#c^vky7J6kw-*1|3?*>GLE_#;`O0UFHLQ@vOnkLazANGUvI4F#%%Ejmis>(R#yet5 zTUb;swydctUJD1(&XLu)T%Xs_%@ffnc6Yx6ZyFsc#L9bjwLkMYl;|rqPaSw>)2gpi za7Oqa?)ZdpL(OeIrZFdW-`6H1^2ISy?oZ18>UEeu-~0CPk7ytS(V9;Wp_y`V=DjLJ zuBPFwRpHR#_%?)bLxNk35*`D zU|F>fu@GsOI&yX|<|0)2U9`{ltzFdguPZ>YQ>i3vHTPGuH<-DN6XUm!l7da-LNlfz zS9#UgaHIE~;U6N86P`^i`JKT`TLP5bAQdct5aN;;lnb;$^;MHzW7}B@>&=A@Aa16mu-CI53sRpC5V<;)x>_}2NeJ_qwsXNANkI5wCQv_8eo7CuU43l)^C-}TNIzx<{oPRlm-U1oCOd%Ac!1s!z(-r?$SrINUt#iTcBC z9^SqFY%uM#eX=i&zJ2QHeKUCe2iR>7(2y`aZaL5aBTYlQYtc=qxmJA7w5a$B-*sH-gQU9+R%Vitxl}X{-<<>kobyAIkvw-5Ypd5%F^6 zK3~mjzZ%esR$thZ8%qQj!5s3#)>?A>$^XF^9560Va7K&<6AV8GGj?3)P?K`HT@6gh z1Ml|Oh(~0k*B02-Q+4n=>s5Bhs^VOHttIc?oF<-tDTYmURxTkJLCRjcI0O+4yNEB zMc5NWTm05+&)1XkLz6WZtT~l>)jAP$>Y{g%gWtq7Z!@ho^-+F+j`WChId=`tF<`A+ zfW}eMfDu5Mj%?FCLFuk(TugslUeHhLwb%->{;rPnw%?+0dJAJp4_XBGU;?dox+v&C zY`&2?o$$QZcR}>M>#~L-i0;}F3}T9~O`h5R^4HZ_)#$ zU`stRO#hMr0dmssxK-ZiFP@hd2mMaID$=~*kAg2bEH>zqTc|)T)TJ1zCWk8Fedy26 zBZphGQzsia^_^FL62^cK41p@#SUQ`z(c|S-zF332Xd=S*53l>`h^81jo=K`L8h`oy zuN#55=FXV$9li}`)&XgF=x>MbcMrmUUq$}HL9)|X_dji{N-LE&S1oBJ4YR9Tkfu}7rSzjnWd5&228^7pQ2K&tg*Y-x&*XR$a1($U%NSE?gzY*0!lkx-c1Shi- zjtj5vFHP~sB34@KO^c9Zb82Z*FOC--N7uKncvtNxDYvs zEK|~DF3JY;!Ru?MDUJnud{vaZlrWUyzyRMg1+bbvMkht-3Qs_3pID?IkL5Y0mQ79EuQTi*1_n9lk$-*R!vW4(jdyeTwtvqEunnxY*k|KneSde0=o0(Gyn%f6HkBu^C^ zjncuY&I`_EHOxc|eD&|_e>ECgRCYDxE~d#m-ZnIh=vjHH+jAKX$rfs)u1jGlZCy_{ zrlHPFb+R;YT8s_);C0rqXW7PJUo(}T{j?1>hF0Nle<6n^0BPocfkhfPc=gTG=Fey!w-BP0`jgewP%|ggfIROsb05-hSA=-BB z6fvI%`|A9gC5_D-$!){)K(^>9h*Rq_Ii>gcAc(T{?_t;8)mt-mykOA69#0p3b@ZRA zA?>Pa=m00gBQ}uv@Tj^Eq$MhMAoXR6?HcOo3hO0#51UzNLjBu-7+8l` zHUQ%PW>9p z&hSyOuDv*tONTaXZ=xw&6O~NF&B>&hBT^^a+HWNIw>|aPph!UWBA((lr8+xe zVwn(&OudoE(LeHG%Q(G5J#kFkDU)e4%9w1Z3C?kz5!|5Y5 zOjFK&Ig_BTEQ_L99UP4#F18QJr-5hPLg_qnCc*_PL;F5j1F z2FtZJ$N%DO|4(M9=gq$1s%`5Z(Rj>2$KQpmdK}@Shutl5vl^!br*#|S8o3CD1z*Wf z)IL^@muC#-;Xr<@e-_T`>mzL^)UXLwIq&pyI>-AHS`$BEaP9x&mTx{>CY3^lwHl2YirE*RGx9%jdiRK6b>9IOL+Dau~eVXwihl&oQd zUq-+mX|={$Lcl-p`+nc+`32?z!V51OSK@&FV9B4yj+!-6((gre3Y%xCY+WRSk`3yTjNI zLL*U54d58Mg`oa8!;RFgR6_hB7u_IOdO#MUUbyol3B)H-Xgh+d*VF<_UdL3l;_PT7 zbMbWLL0zVmrqgSfJ_x8b#tl~oyk!FXdELnM9OoEuNDCTF7(u+s(vf1j=3Q#>LYD?@ zbf63HG?u{};3&^O-VLZtmBVhQa3Va~#jsya7B@~nd2KW3TtAm z9+a^TK{Hk-kJ?6#pZWlbsze2e5;cRJ3M*JJJvN>B9Y5T`9Gs<(zHGEe#(u{kgxWL$ zv=RSOEJ63N(-ixIF^A{E3Pbx`d`cG&51Z5eq@+JSG1D(eq{R+kLvW#1^xJO4zQbB4MV_!(sF2BuRqHoyGq9ts; zG!)Y+dwx#lcsWfsPT4uDT_<3>UvnzE`1jXhD5+|@I_mcD=a%DVrNZnK#x%s-Bwc|b z4DEK|ArTuPZ4fr7uB?GXAP&w|!9b3kuclC{f4^eQ^wodOQNv-)@xQhVkF8m}K2Gji zd$AYx+%PMdmRDO5*GBe2WVy%hI~wT|ExUlI5zP6SsQq~`Pc;)+qm)R=W1ZlLJ$H^;$}@7SbQus-&!L zH+R3P%K77zyXA>`5!$t_xk;5SFb%PCG`?j#VfMQ^FHBu`mx%IPGMFLjM>gtr(v|cE z6(qzF`ZH(AC`rL;#(Iw|Sa@>VEEK9drib~&{z|w-D+IdG`o?ToxO;k`Gwpl0_os4W zgAuT|(~M*Km+Fm^{?bPLj9Ta4k*#nztDdiKSb!p}9t&=yL9kY-GV{MU@_g=TQpA^h zDbW~aj<=E?j$Y)h3pp_#$xt0aX8O8wV%|iD=67$tSs?*+j%e*lud6(PMxynqc2T@C zuSRu##n@nKMCfd{wD?n$AGq8K*52OpwjCpom$d(mV~4QWOHKQ!8ioOnJRkyZ52O!1 z45qKarWY(T;bU-b?ElGzG2h=kvK)TFrSW@nh)3a|QZD_~l+C7OEs&3_kW{o(DjZ4| z;6pR*VJt)qH-*vsph3@clXbL=ku-6O7>U|HOngo{u$C?GbZu+ss<>ai zJDGm3eBcc@JW;U9*G?w5!jd_=&PZZJ(y@;3(rbka%$uwoJbXaX{5fEDKAQdcypiLE zKu@Ey8v*!E8gQ^j*NKe$bQYI>>GnNan&N)@m!c){NHfX^rn zdVYS6@(5q!Yj%|t)ocM5+Rr>u#?y)$!Wx-;vlg;q%`I7an~5BwlreHP%1>&~=Ah`I z(N!$unhgo9l)eZ7qK!B9~np=Nu} zmgPwcSbcE6Y+1F76pO?%yxJb(+Km&Djmls6BF9eA3}Wf{g+Pk!j%yla>!c7Jq@Flh z^-`ars&6DZLY8u1>x3O0WS0Gf3Tv<()Oa#iek(x$oza^fXu?-qX)Um!qhmE;Ij`sQ zvHK5HcY7haF?BpR-*AasWrDJnH1 
zR&0s(mLn6Exjp&}BO2eYrY<%)Eb6l9C}T-cf-k1eOG3^jC=zMf)Mp>(wlBAff2gjxhhM>ztR-zBqJn_K>UTY%s(uGD%W)hL=9d&6XA zr&bx5>gi_WN7Hoz?De|%OKjLP+yP-PZB=<8x|)Q4D5%0Aq(rzC3)1it=Y|MPRSEWh zgM&Ye{ihbL(dEG+&vQin)#tJwhr=d~EljMYq?) zdpGAs+|0^*ch*?4Amw$OYyB6Og1%wI6(gGT44mfbHFJ17sTMy{;lX%^Gn&rvL}lbE;&IT)j^3-p!j|a(}+>LPwcdEoi7^ zpl#i}6l1&m)}SGCiWT1^ zHfMt{-U+4CY&~ig+{Mba=WlK^o(tHZTY(SQO=?C65|glNR8$&j`mtdXXwWS1iunAb z?mifX%JqC$=y*LwW!xVKg~RXFb|VPc5oI0>$i@XqvjY+FC9;zGU>-#C|-`n68?+YJPJx zlFf1a58l!Q*SS!(-YeTxIUkI#$dsjgA$UGQk1pcGwZ~s7fcN9@Se7z%ot-A%ZXDE> zk*de%6-k~@t*@!y^lXGVW>WO_>lXawteyJ;<8LY)_BMeU8bR`Hf$$8==Cg8!JJ%-3ocu1HKW zp0MHzIXFxDR@N>WUahSuBTe^u%s$2tLNm)X7S7B+S#tHlHBPCNeiysq+H9A@T4`=_ zg+aDroNDHqtu@kQ<}qa7+*zT~`hyAf4rUbLk1C{Nnh>RCA{v0F4*H@*2<4fQ@afrDM+#UHk+e@=<1s=6?}hAA1NIVh zs0zk?8v6P~|COaG3jCNO3>qM%Oxu(pDRn0aa&#Hf*L=o$rIZ33ixeR0aznPt6@oT2Z=_=*=tt`4URhy3u>c z11yHmgKC2v_Q-bw(f}_t0EJcbi2zjVj7v?=3AyvPK|Tk{pjIB{yz6GtGMrmt!&epy z#_u$95Za+D#ANKwZqAwy?T-H?1_p8eIdeuZb+SH_$(j?Mhd)1l?noOSW%vQEsoUc} zf^L1ac6{F%Jbs*^MIjJ1VdY7g+pgBNO0UZQTQExeOvP^ObtZqH{*QX02MkG3sAVd) zC-RR#6xN`pwy>&wRz-~-l+Q_1qCLP|bCiclCcgPOG#vOG8istfK83ZlOxO~jMhzGXF13T1cK=fPyEZ!?9R!|mW7)a-*BZV4(H3)KBy# z{eyU<{RD!O?;%7TY#%S!?{k+;8NF+?tWVr>?Io$y-KlFgGelO0UKRxy?a_u1_t}}9rRms#VLjJf?o;Zj48}P!9zZ+3!8gW!t*3^IWQ*q8Pb~|>CTU}0j&j-ktUDan_Cj!IP)ZARb{jN<=bsv5BDCL`JX04yR1BM}*re)#c1gHeH zM~M8E7`wfgJP7-qfF4HDUai!Sv%GiS)enI*4BU* zSJ7N=x3l$sI94raLTUBnuRWwL&ha#Vk8}0w?254@G*geFUqKcqkBCDTOF|nlocX_I z@37l?^ZR9nH~0Q769|aqk};YS$oSMWT`m5RFH}B`h+7xOHj6eIh^Tywg`auJfA{ra z$kDH!@a0Nf8Evpw(+b#6lObZ$Y%>N2TNn7?rK%a!>x?>S_U^X=^dT>&NUsM&xbnmJ z;5J{AW>Tk}D*cJH7~O(y;V%yRS)7^BS=&62`Zxg}dC_z40PiQTCi<*{;-p~+E~x5+ z0GMyzo<2lPD3}6*9l^O@fJb;O&76%za$4liR@3LqT7Tek*dFaX<%?V*yIew%r^A(w z1>THM8r_Ue5k|^jA@ef~4Nq^EJp=0xOUtn7xNL&@<2)&ill9$**VSAug1sUZD@u-3 zv~B|$ar2M@UXbzl^QBSMKs4n^?`*w;W1SphonD*t&K-K9C&%Op9Wy&Cy2B390uQzZ z0}4W>g+%19Yej-gTwjE^jPJ(P`FGrgD-Ah}Y25XT=P>^uF40h*RuS)rj|JH1#CF*% zJVkuI%F)EY9g!+a4**>m-s#@yadM;4fE#vvYp-+3-k(4+_@HgH`pDG|Bq@zwe8!Xe&{$t$?G)1l7%4ukYZA?Y1VAporK(F?uP$rb;>jY##c z4~<;}iHrvhO;#MvOrH_ZK+^{FeMNoR5nr19)JJui)=Y>jdS9hbHTx&xM1E*&Vgm1A z)jUZg=v4B9ng3qR{zLU=H|dlu38h{nDPp!T?v29JFI#akFEg!|y}D?7OTl=(5;7k^ zTcQ%Fn)@i;;M`9AJ&c?zBct&bDSGPsW|l?c*$|`pSIuyGyKn5_9^ay*B3nBAubRp3 zxbR<1t0`t%9vmy)A1U3nc>R5Kx4AT(d+zV#0YZ(Lnptg(r)O1g6vov`Ek`oOH#RL1 zL|F=ATcf;`C7*vq2P~#1Ig%?kpA(wMQB(X*W7H=-X-h271R%S2mhM6m>1~qrsg8bO zTmZNvL{K*qF_=Z4_blDbFKSzqsRS{fw&jTud;%w?%8!>vL~1Q@@#F`{yN8%Lb9^ zbk6DTdKI5cOmB4+3W*WWcB?Or(IYE09(x0>L-l0o?-%RisnFIhfVc^Z+HS6bC@6X^ zM?(;la`MOTl})T8eC}DKG420EcVDLHE`FwiLahaYjIg1mBQYqpK!l_7bI^%=;kDSw z?Emib!&{lcBmh(SKEL-ZBN;dOGBPO1@}i;OIc+>>vVcVz$E3=Yh9n%vmN;JOcQWG@ zV`+(h_{wepjg5YDV}G(1^ougy{ejo}g7ViS*CXPjW?pOOu~g0uWF7~6{ zO|PXOX{&cT`dB6zjh+hTT(Og$W9cYCaIoW{!gsf#ul8G_=)fyRHJ(?H%k01Dv-Z@_ytF*(W2q7$&B`329JNOXFhV5O zhOl>5tZRopwWR4q>_P4n(qGg8Ny(7)^-+_}Dr-|?&7rnLsaQ0zvDPejbSV{YX|lFL z-i+Bcl&0bO*H;J2nlDpXcrD0~j_}5;6chBB4@Awi=>J5qwvfj zueYWl)y0*j+zOx9e#?3GAK&qnVELL%stFRqV_l?tq=gRAALe@dOakT^8^l(Xbukcy zn*|q zj)AR3hS0nHx0FGVsuO5k(}U-HFsT+;``oxGs_Wz!u2BDLmve4plPZv_76Qz3cag{Wy)0x)~nz#zR=Z<=zt+szyi+%h36-C~e-*5}hDdsKFBbBM-t$`6} zVSkp@|2)Pcs9D3>v{Z;Mh#ZxERQaML;@#(?{-I z1W(+;tL3uF^r%FL>(;W4VpxwSE$yvaYyHrs%LOiL>$k;)@ zeY_XYlod`I{l>20m^mCP{&%(fLm754MJm&k9m~00Fv>{hXXqCVnX0m>+%w5YZTt8; z@Mfpr`8}}+;mmex1T*e;jJ8JgZB$2PLbjtkSvae!_E~vt*PFNP0n&!9i3`h%om{$x zaw(d#kn?cBnQ{>X!Pl%Gnls_2uZx5!lN#4xs zGssXhWY3=YyG*JZ#XEA@eyF8_Mq1~ImSkOc@{cBxnm#VJS;>&W9+g}FEs5@y0*qU^ z^bO%hX4VuBtRz2UnIcAabf7;anSMXu9P*M77`VkJlZYAY{P$+J<~X8oHO$!qM<)KI zmS|)m^nkdGArjpsMXpb~ji=e3mMMPZ+_FC@$-k!7UK1g@h`z{EPC8Leou>YMyc9=9 
zTZ51cyXYLbtf8;tK5&>z?VHuFIVHM#i3)mG&kq54wy$j4Pnjo)%?||)grHha+$w{g z8p)4KZXYu0OM@cR2_Jp)Mk)xxD)j6qktO`si^_(*E*6oC?(#YTN@N}P>Nl~Z4IL8a zjvs&l1MIR(S5xH3aJTH}=FY4R_jvk0lzPcwj-aStv^qbZ zFb8I*?t0tff(g zxnUT}Z<+*|t7MTwQDPZdP-N#6=(e6O`!`3$R(k&Wg{*iVeJiUwgZe@*{eMSr5G<5k zd5R;ReEnPl*n{8CNh7jlTab#cdhgF?<0;t69gn%>4k*{{I(L$4Tz_^ei$)N39{A?^ zw|p)$e#V;!&=IAgVW&z7(T3D#{fb}9{baKCIhxVvdMr97(W8>S0?)7EP0x~?C8cZy zp6T;7(Xf_yBH~SDQlILISu;kg)*OFojF(|ZBwo@o=q^yq5}j0~$WtHtJ1VCyHQ-e1 z3;1PzDU&Twp`yF~+#}N)yuS!G*V`!W_M=G`Rk-8+8&}NUVzUr9Z8lb2n<#(0BU3M` z>7>FQWBwz5a$Bnmjd&Ppp}R&O;`p?lC(_3CtBo+Re^hKeWZhj-xKG{A0Cj%dZqMc? z5$Luh$p{*@*Z+S!opn@H-`lr`76FkKrKG!4N{~+J?(S|FKtZ}YN0jawx*LY>PGO|G z8F)ILJmlmGvsRCY?@xNR`&3Z!6yT5d#Z&akm^Lr{b z62a_hm11ettYBwVC_nkdh%7Sa#QO$ZDe>~bLx~FZ+!kL`eq7Kd%qen0h&feCD}MMq z(mPHs?$vVq*>S49TgsPexv`uTao2FTbEeViz<9mhMJl7`#VTGNydlm@SpaJu1RaAEvn%6aK-{@O~raHhvCJc>?}mw8HY& z@Ohu|Lss%MGC=SDHXM#OyrXyXpL%09AfHhIX0D>REJwTIfLB!i%6^kB(JfbD;GeRz zmeG-XImDfEijfn%tS@{OZ@FEUv?IqZ>>b|>Z(Z7vFrh%$ zccUXQuw{tWIE&C^e&!v@rJ6ABkZ>liF`4COIC_H_IjZP~6At!3jIIZUkr zc`Sss0z2?yUY#9{;5QYth!Kn&Yb=x~EN7PVe{>bw-{DZRWme;~tTS2jX4~I2g4zCY zcxj>pIfyWTS7a*n+L?1iJ7bxYrr5srdaAhqL#8CUBB>l-irSoVVQ!$NlOu(@R9iAv`34|cmJ2h4MZi*%q6Y4zINUvna#*wpuN2|#82 zyTu6x49uXTtw&F`xST6ivu1LBPjOycW%yZ2GOX2hjM6+VibTO6+^hq^Yw-=7Lw0Do z@+xv*emR|QBplm-x!%^mzf}Qdmq$O9CM8U=zEPNb#iwHkcCHg29K?&f{op~ViShIK zv5|6~Hoatx?@I|m42BOS6}d9wv;ddA>vOG+=bZ7g?}-Rb+(ac47L7%`tVi*E6y$4J zM<4rpPA9tuq6kP_)A^y@R_IOb+^`P$hgEej`)q_?>3C$D0WG76r1@V?J>x#-{eeex za;Y$~UVf3+^07IO89RsQxjVSHxS05I0lhb#OfGWZgpqPa?Y;zq2+?NH*6|z z&^i`g;x&T;_wL|x9LPaC*KDVL8-5&~N7~nu4%ArX;A(tjk5L+ZSK{+vH>tka<3lS; z7F}vQUs}S6Ibj7EsUY!-yAJv-1$Ct9)OR(PajS|<&G8p^-3pK9i1V+jAHb{IOU(hK zABuOhz+KaabYdW1*kN=vZ^KMO97=u+NTZDIeDM&JvQnq_vO32vgQ*@v-VI#{c-1!{ z>b1ct`rAzEYFE4_J7pX(iq@q>((DmrbBYGg10w7GY*?1pde3}5NY>$Wd$GtzW5(Yh z#=r~@l|TX_@L7_DjFN}6X16#^>MW{rIoj^sLvF9UbuTzW+7&+};)44`j$rffKuxEEWnTJd0&^!ddu6SeE**k zxQV*?!v`WNxtOlmAZct`*e{>-EPHz!ecpJXKnvU+>G4W%s#Vj1WLVcC6BA6D-z2SZ zy^Zc5i414un^HH-e>u;3-g-2HZgD-P^V8R6SPRd@W3g!GZZqG9HyT>1=-u}5@B&ZT5+cm7 zd^%FcI{|%1vsE)L&u_6X>~y7F=tdEpg@i2{+5|u8Cuw3_BH};Vs<}(TI^E9uPY^_Q zA3jf}FL4pE`#j3|+tzHdM1-RxjN|J?q7DztlvJ9vf_h$q<&4$EsTf@lc5m$P+9Vw7 z<7J$e?)vQYyYImCyvHfJaki44lwcGkL(Be=n!>US)v2a|EnVcx+h6sbWyh19QKw_@ zju9|hXZ2@_*$)yftz+ppzXAfmr;WH7auS3kIzql5OU>B81T(Kj+D3;D=HMblcWL52 zB!)4#iJ!U(S)Cvu{|L|Rj`lyKDF^Ox-Sv_Kv6C|DVBBRak*X=5hggC=V#)OqDebCQ zA{h6hz{|4V;Voji5-~;3qK&R4=iC+y1jBfzcjsC#dE?<37}PNvyi!4wk6SNI3k-{qw6mZ{@lpIcFA4iWg5_a(8eJB@b1_GAoi%Tg?A^V-!yEOXcUwX&_l znk&D$6n6dHGQ~T_U)KP?(l^L)w#QGaY*$|XZN{V#4IR<2yJsH>n{o^yeDhlV#Nxy% zHty|fc|eyIK*$gl>USd0wuM!+)eKEta7hfmmb49joQ`m7`#HQS$+ZCaB*j+hqb*X8 z;Xkiwj=k%npF8Fr*|j#EdSFe~X&&+Y0I5woVnv2OkVTVP8=9}45YvwAUJ|o+&Sz0N z_PAg~l8!Gc7HyLZpl=1@#)j~q?RAofJ{ z?Wn>5M~6FncsBmeAoW2ogd(bIjo0KpvJyYF{pmAbXkg2H6pKIyQ6Fa{7kn}Q+e-Qf zo1_n+t*0Fyin3i_=lVGkfBO~elWk-SoHX8>GxK1 zkOy(_+QoJC6W$Bx8%?kp&Bfd%efPcV5Kj8HoV8Ye|5lItyXWPCoo&NC;2w}Rq{p@W zjvVlkXVi2(ujeu?Th#+E$)S+9YSZ{L7%_3VY3eAcX-0};#4p-1m=tC_v%s5|gg87{ z)hI^d6WwMsmdsW36a?c$*~+#bouYrM?@U_=_k;+DWg8YfJCIoCejSVysb5p8=o+)t zY&mKW;i@!0B-hh(NTgyzP0$fwSE8u|inG^Fw*bUXz@A|MB%Fs~sHBRb z5`z8Omz{u)r~%H`T{i}8nyp@!V7vq#0NMZ6_x8W_*}>DRI9+j9QJs%#r_M(;gWHMn zY?A62wUt`LgOmrZgd$t=;;Mw)kK9JOkMFsp&cF8i=i}R(Mug6>SMi$dp$!hABeR?L>pYrjqu(JX*BrZP1e>Tpj zs7M^saipnMh-?MUe_~i_MD7qNHaC*!Gb?+nfQFP~qGzLcrHzGwH!y9LXcx_eOR6Kh z9F#F}2tWskOQ!sLJ616b^#zZrN`y z;@9Bv_}xhqPuyh5QXpst;6KsUG!uvGV(5y@VZsZ z294$2MP`yFaM?{`Vv8}$ibcllknz1_obbpE8!s70+kJ_bMD*M|uinIsughA=CmpHw zAxt*eGu=P)y5Q5MT-*-2@yya96@yOer=_;byN#wlGGAVRZQIM?W3{b^eOsSvcpIsE 
z&}D?T|I`hl7HG?zhg-7=@ab99wB6REtof1)&=$OS&J_|USkiSa8CSLN?xUH?qjU! zSBVmWP3AW5GDaEU(=jqmS!Ey((9X*aBymK3pPjFjX-u8xcOb3%?(i;fzC+-RT2-hb zZ8B%@bha#ykypBgwFs}g$RFlr-)I*S3yR48IJR(66mf_<>yp~Fg(R8ud-MbAr>;u9 zSZDEC7TPEZ^fSi#c>lQmseb2yL4clSq{>L;A8E3!6a zFFZ8&{ZGdmRM@{d_ouIS{?nPlYHIvcwsgcU0f5piUz;>}0DhJ4T$%ID64ZD&ktSu6 ziA5-32sZ*bYK)d?+s=Go*yF&BG2uNvXZfl!&4g)b9K(c$Mb?~19u}b*^-8BeAjN9W zJzq?xnjaCg7!NvY`f13#Y|Z)mJ(P9mc~R-m3aa4xK=;31fT}fjh)2bS6Dw?MfxeNo zRW7gaN70|-N8(PI?<=!Mih!oZ!$8x}R$*C<|7 zuPkYE45eFtL6rhl1C5s-m)Iwh7K&TVi`c9 z<={Ep{X_hn3~4>=x^ZM`v4}KH%Z0*2G-xSPVP(;=s1tdio2@PA+Nmnzc~wSJ zlgDJ_ZtES70-LjOFmsiL`r-88gJmk@XMlS`ipS-Mk?_LUZA=9~KU?+vdcn>kV+;4= zM{)_YUGIlh$3|B+?BVuNm=OOh7u*R&+6=Qs83-h-{CT+ywma`<1LB~Oc~f*e?N%9& zW%8WWxMUO+QOKA7zD$`t|1Kh5Jc0N=In}wc%;N<&stWiJ#vQH+t=yNl;U)wjBj%-~ zZ3iJo#J)dtrj>{=Q=56PI!vScdcpHaGSiG_EDecjy7Uzr-0d>a{#^4Z_zs3R5tVg*W!_Paxb0 z)`O*6`kAb+e=%KmMD1EwhD8aYUBzA#y#g{hoWj(C8D zDhaVWUT!!F13A8p&dXnU;M?J7@(yt|@g`%UUzv_9YcUE%?bk=R*y7k;!t%fNS@y^x zB0z2Jmv3@Z|G_c@4?kwF10BI3olEUO9=Jbs>50eAozYc7txh ztFUUyy(+HnK?uAV=2`F&8*|4Z_#7wL4`)#{6QeB-fzk`;DA##M((RQIfH=MVZ z6sr^ef>>xS+L;Bw4{(PZRBrk>5%`SJ_Apm9&=X3xd9&gaRwb=HFiT?wKIbEJOF|Dd zb-0U=%u)va-no=;mDgnMpoXX(^edSULs%NNnL*pzv}(Oi1-R+U7q({7bwg9m%#yZ- z=BI`qDy+5!oE9c-p%u|SB}vKkv61C12jZ;{X`jgIn^P@1o*5qW-BnlKlGN{5ifL*S zV3+ip)yKAUSeRFEA7j8~Gsss&PNh%vLg`-G%2!Sas1W>l3a<8(r>hmuoGBR!ZjtVs z0A^_2?YAQCRUU_US=;ZKL@GBrLSH!-#hEq82Jp@b%uQ1XPl|(jTR9<b3ILuT7WwWPp5Fp06&Ms(!?AzC-8{4|H1_zPP_oYwsR-WH@?njEdfxU| zU?m2aa1fuzehW1~^c0#d@Aubi=>JajtI*x3kyAk1n=S}~J^`uo7|+7yzoCKc{wS9j zMLrGe5m{mG1yjjESe-56i!ywu zF)w+#nay(RI(d31d0KE8+nx@q=keIUK&W8>>F!Ai_(~Y5tEkmmk<31gV4s>Dh`s4~ zvitErJG}K@OxJtv1{g%-$L+$JLf~6U1dn}?%T zRrOw7F3RNW3?}Ccjk8UWzr5K-^Vt5m7l!6$Z|))aRxR9k^cz1sed(Mz7%{N3kSdfD zBBNfkJzAK-gXc^rZ#6-1N@Lq^6aZWaOy1`)+;5qc4?MO@puNBLAUvoYE6*8N3=y6q z`K3@LN$P~|HKibnOXNn}u;7xanV)-mX1jWuRph%*Rznj6A4XM_+s{s(F2=h1%vnx~ zbOQ*w1So8M|4X|ynmlc_5<12=@a1nHX5!&>3fUux>%Z*OKRev4(v-)-8YdPnMhI~1 zgn72zfUEjdq)9^|Y@H;M2|fIMoC3-$nIq7nsf=s@m`41n4#;_uGTizR)k0Bq zSCc`fBp$z+ow91erZ3x$eBo3b`ubWYk|zr%=O)|Z8wH1skK&7MPTSnqRxq#4fc4XL zzsGr5FnA_cdcx@2Ay@T{?#+Qm+@4WGMl&Q`h_zO(X$QzF>z~M?VYyB6(VppU3FQg;HorvT5D3S_)G&Jh<)*B$0oFx zjkS4_Jl#XIkKq22JRJjI{TtlbD*9p5Wdl$h;^!lC{4auN1)(odyh@`me-rc)r72t< z4ow(Z;h6#2&w3K zZ~R#J+Ps#~FgN=?@wsd~V;em`cBfR7 zdbmPXxY74%qJpvd676(gSzP~M`^E(*R%(}=eZE9jCU?W))Qw*sVpTIgu63|O{@9nd z=$M7x4g|>nfK$N92RvP!qL;;IAC#bBYaP}Gn-@{6t~|gG>|cJnD3lisK(d|z#5;^2 z6=BXII|P6HE=9`P0~8_u*ScGTff$=tWDkCV@)AJKE&(8JS1VdK6dqIo{)POLr_fo+ z9bi_t{$~Mk6M;qkd=_>nA1S-bwnR4YVV{2|y3zRJk3%WvhY~S!-6;a}jO9=+;w_4p zo;i$)19Zc=x!GTZo>}&|kzXl!tT#NTju!*@pZ_599`SRXXS_`bumQ3V*PIwNx|Oc* zMg+D?%}Wws9WG3E;@W$V^V&Jn%cK}z!w%J?FPWhak(cB>Y|quJkK|=eoRR**Oz@HY zUGT%r@26;#C2xlx1@fT8aV}Y^K_5e)`)#Ra5 z-7FJwTshGHh~QSQd!3$q8)xXaD>H|5JzaJK`WMq(!u@ zg=Id{-h;Wa^D!sje*_Z4>r$fm-C&JW4*=8>V5&D$&@Uqtyf1vRCYE zTog(T3u2qEj=AtW9|N`a{wz^$J*<##x z`3tIXxCcAR27nh(^!ZMZ&ZGdXKX>b1+}$VR0-wyXeGf|b;VoKcPA{G!x$tc-Cz1SO z>m+gf%#e3BW5EHS<4=f^2t+hU?Tz~ASKOGfL@j3Wf<-wO*ZC<>qjM&5loR6U$%p;e zcmlRiB~93Cn)HPs)1E8dlf9iMncHa;XRl6|bKEU|kai%LCpN8kW9L{UD(dlKnusWC zd>*i$JXSrJucAqiHtb1xXyIxhff#H=b27YYh?C5uLg+&$W#l&1>;0#u^P-Xy`E7#q zRHO;j8U#+dC?5p3qgAl!qu+AvpTiX3#=LL~z220~3;FC#y{Q5xna>}bwinpxpvX+o zPPW^4H(TI6QyO<2Y_g)R6ctP(|1mS!f|V$uB(ky0?1Kg;s60+TWDa^dE5g&qP$(3| z_-#48rK4z;hn}hIqx-yG7Q(^}HWlafV!($wVCA#bHJK?wMk0a#aN2B;QI_%3WM&zw zGL<8&^wR3mtA!aauOm>Ma0_`eWIsy}LiLqyp5ZkUf3UuSr74aB2EG1()|$wYxd>%* zD|1kpqj$JhQeLlC6SBcrKjp=$Zv%|3-gNO5d}*-%mPTtT5Oj%fguW>O;0rbSP@x@+ z!PfS+&T3+y&F9j2m?q75u2f@0Sgqfb*BlKjuEx1AB{V)6$(DX6tzufupfuUo2%}r* 
zBo^IX?3NkwZJgAVppkiNbr9M>UgTg@6(Jiy-SFa{%`m}^K}1x#P*q#M%&(YWjZjbJ z(g93~ss?pFAm)8i-XyDlYSvm05Wbr0@H7dP7dJPhnQKzC$;$65u`{~6DJU_FT0wMb zy>NLt${qBfBYy9X1;Fs|VuMyVeZlb|slX)*dCa)F`_BiNQ3Kqu2K6G4Om=KlhQIko zc{zt?LkMPy;6_K}3_JX}s5vuY+K5Xw$ zGk46TJXc!ruExp0BPmPo@VZ^zYjL_%8~^^B7U6IY?FmC@QH?e#3!ghs?KnH#1_1tuIHCz)PLPFb0Zo`L4Pbb4xm0-f+`PjJQKJ}>3@V^VC)AiO2z1c zX&sJW_L^AD_b#XBPp}E#1A}htpXFAbh=-e2K8m?V0!EfQ9Rb!i+?uZ}k*k>k9*m^lO7g0Dr^^W;42<1QMj{K~8PwQvaHZ7at4Q`bA+g zqFZnd)lf)ug`1A(ke=9fg+ns|Zndl->bl6O(cneK23{?5p~f~znuo?h+i0KGmzUD% z-|yL?ZP)oxUVe^k4(iN=a0H+g(n5XPV1c>2`M8zC4sy;p6&7YIqov8kOQBB zpV@MD)=`!UnB^&6v<#^!Xw2v+)mGkN(jcj$}av|2sq$UaEjQvLG|S5z3|;phrDjSro=SF8bm>n#qIl zZw#KbJ zkSYah%3p2|F=}haI3Cs>hKa6&h4R=_h2~ZVdhhGFVe{HK#J1ebMv5rBiP z2N2P?%7M2xH%S`p@UL9xJ3G^a)(%R^Yt8MOv&uR!pmLj~tO2FtX$-VIEXuY^YIb(% zI9WQU_>>b~z8++{69ssVf{efveE+juOKYpxnR|w0${%CfGW$x^N z8-b?dhLz;AaF&EmX2l*JV-+ek?aT6x=%X}pG`irkY1!cVpS1x+U#l4dm9PD3iS_C( zm+nS&y=eWEggs~Cgp~>TmZ&FX^ntGpB&~ilV%03IV z-053@vFdtKJBjRe5~&rBam-^p?7Y{V<~RMco=BLqDz3DQWYt!kQN@n^-2rFPR`~KUQ1H7Tz2#kPMtAPuAWFkmx86$ zrVQO@zBvB3XN8q_cyYjZZ!BkL3NIBBHYOahksy zpw(2{gwfYFD|&N#W}kQNp*y#T3>Ge#Igb;)?~(|#mBRhXN*t5k>LeyqWz0&)5DQ=y zkqhY*WGnJE$Hq2tyQGzLb7ttHF5x#$92gf`+BqdupFuhY6h#Bly?%&Ahr@kJRNXv8 z^O-Xsj?bB7+dwDcnG!44|36BnHHZ~W$y5jmUzpC7)5oX_Ldao%2jMO=7kuaC@;@Hc z{fnS2P6~bnsD6~9VALbb#QmKqQI%*F5B^pfP_3M3CQUYq;;3IJ4;^@2hHPS|Q~1U~ z_mr<+`A1naJ@1_QJH_OBJf@N>n^lr+^i7Da-f;v zk=>I8fXKDE86j}l+2zYS!Kl@JJ z2PYh=iIy>`Y^}*iyrPEY$o9^>Mlzvf_HSVGXwE zvSwfjN1ajRJUFBd|vjL&fpn;X#$8wUv3gY-sHwv z(zc*nN8S9yDtOC9jQ^PN_BDjek|l`zr^W|+LBQ$fG9}BQojB3h3E)i~|50_X%c4ox zR@+3tn>aXnz`+ZTPL8qLL^2NicZriqQPxL-TCmD7>l-dU<-H3_nH6{IP$@&RBDZ9! zDwDX~d6Ai9e}pFA^H=VJd+fHroY2On_E>?K(yuM&g16?uEWzJ4FKjph=5f@HpdTgY z`x;PY24!DgET`n1&3dr5ht4*47b0I>FKr+^K~FWeMOmNFPo8=D3$_^mL|CcA_2fSc z7l8bfY$6kg+RT&__I-A6P9h>ebo&C^u?d9s@}Ki?cE$~Nh)4j&s6om|-nILia2O=f z=?gps1pi*lLR-lG@LXlrj53>;Dq_WWqOE~k7PFs-o1KG(JgcQjc~ohwvu2Mx4Z$?D znzd#5*D_;4f<=;~TKhzsu!3Js0k6i+Cnehz_TJ?E;Bl5Zw?^lyLW(F54 z4GwITRXX6bPlsgnKE_(p>8E$8>5S&}(d9*Xo zF#tgz3DXp|;%w@tfoCZ<{$(lOhyYPG0}&Pxx@zDUTVow}V>xubb#JP10Vv5)Gc?!x z*iN~-k*(#Gd>Shc&v8KxvNVR=K7RK|M-c@LMjp9?ARW_%Su6{FF8*$IZ8Cj`QsPS`!w7enbJvKORuOCYH6sM5nM|su@q>qY6!3?4K2pPM{ z5(Rd;O*jei@qHW?zFl9qJm+7H&)&E{hUVVh13N*d`f zgM)mSdTKIi353N!F-uaX+ii9&MXoYV>AH&0wgR3htJx`Y-))jh8PPiIdyh8Wtl4_p z6!d*}aE|3efF>#_e>}Ci-g(sF)P7_CA?Sll6pRE>A6aPfVA#OGU=_|Q*7x3$PUm%a zPh{x*v2o4QjyFKIW>ek^VpzFf{+{!a1N|TpfoA|`?*+_1U#*_~W4o!>`s#(*Lpi2= z$z>42r)#;-k)P5EPHmmw1to91(1X)?jq2K7R3a1cO)Xt=r~KgwM9?fgPpO?5YdOfD zZ#_i$#Zo%zb=7W(nA{eaqtRlGo7n?Glq+4y#5~8KcyS29775t@m>O`1O-b4DMRy+9 zPtja;iWnOcPKP!eHZ|83(X|jDzu~YjF@a0zh;*QO`t!-dnfa2~#eqoulPqvxJ zAMWo~&5xE}wcgV2zX_RZQzd~cy|YUvr_01nzV<1yd>%RK?EkF8=O8!6O&mJEO3f!~ zRi9l#Rf4|NUsF0}BYiR0(&j(Qs+=dux3MO`5VE(KUt-2@H;*vK4xdU`BBhnrFv-(i z13OlZoa}8L_Y;mi@k6!;cn0BLBE+Ygk*{Emz zH$p21!-or1R{xg%_Y1it9LJm;_Fy7tjXdFYo?P9_IeV^aKcQ~J-)8NZ%4ApCwvxJ z8zEc#LeY}gG3IfBOltJBoR#kIk6$-RWnfRq_u?X*mDd(Bc@nzL!DY743* zvwsr8b}mZbV%+%o zkMKKCennaw_LkW{+F-xbL+Z8KEiL+> zQmB|U0Jjzp@PZ-af`G{tp@oR0_`1|AD9129Lcy*AM4 zVpbn!u@B_<%QIlP7lx#Y-r;drMR*@EeO~5Jp6~eRRquZYn>dB^=im|Z*-F5&Q+HX* z)`Fe=Tv{&-4B>w@@K_R`9MvdX%k8>+nb*D`H1-(_OA!p7362VWURL}oxP%riPi5Nf zq@3dB&*hBljBX<-!Q+71fvFG?-nm~@v5IpJu!))nVRgA>n|GRR?A-Cg37yWO5aW_= zCWI3jmK&}F_n$D0E;c=uYKIlxla@nP*5*IBmJ_*S3`N;JDO!fZ(*^QTYvy%n%C(y_ z?|6xD)?PHV5O_uBtIu!OLj9~8!aeC)ab|J?^ORahF zMdgFE3ABmhl`^q9l1~d&KeSp7=bs#~$<-Sq`{wf34n&k+7+zm+O*<-e65~wVd(Q5J zO;v{9G4FHBN6GpbV_Gr=%Lhl`oKJ;o!^xHBso{5a(`-U7iNPwXd(eE19JZ0Y%jDb3 zpG_bhnNtVsU|(m$e?gbrYYW@Sqb`cV{MJHt73~?~dMN+}o$W 
z=wJrRo@_+A>7e!#ImEIY+1#5FtzA6iHMO&Rki?kcBr27=wBdd8`BpUL7PLJyW4rY; zzgl!wGvwIhp0vF4d{hz?5I<u{YQ;7nuRoUHbW&6>h3C$$jr+da0x_kkU zQ96t*p7YVj;&z#Dnsxbf74x<=8q$$IKJA7twy13&(+r;?y-d_j+4x*#5J0vqn-gPDS|bYo)V6aRD{-5&ky zOlt={J_D5!=RuwiUEASJ194lZ@>u%P76_M(b)u(}#VWu59_jaGSUsa!WxIU(Ege^a zCAND;DE2YBFpwzao;>TGj#1Lzv)+G;*1G0-+2M#xA_Lp^{_E^&uK3pqgTT5oCGjp2G}ACbx2`Hj0vq=~BCqcSo%L1aE%+3)ohHHjT=D>^nUEGW}Y7aqZG ze)k@Afg2qJjxTtNPlWlQzK%=7Mu}p}7w4^Ar$~6qn<5V!&MapY_C0q7_q$HjNK&!3FWy@F4%22!J`Wu@XZRbcGSwCweYd>%~u{AZm=&rWdPeUa^Z z!R=)xZ_5N`DyMd-vI4?7sC^$K!`7mXP0Ser4+L_@$CW+_3JSRHjmpVHW@?U#heyMk z;`7f)e4G7uHnP-c_j=8yw;PJH8u2nw+}^LrCy6hPIcb%_n3RU6?On#S5E<+Lrrv;e zFA82aP5nyYCXW||ty$e~ld|FW8eKOk@9!q{#EJi8)>>kTo1=D}iAqNNd*QdWWd|Tb zi3MhLvP)Z$e+7oQ3+(~@&Vl#DXYgCNe-C$6~am%oywrX$W!aIAg~aexVJWAG$ba@v=E) zwa5zX^Ov{a>Q+1E%rcTz^*=EPL~#JMuFxEjL=+73|{k0*qo^y|DY3KZ?7g`&>c%pMS=d(u6PtzKw zJR!cEYd@y1wA|10;>}3Od%Atoi4e`J?F%>b@+G!LnQL0>(rld?Bm%C~7=+9}>*|u= z%rp2s6!{lbIFa9xy&e|6VnZ5VW%m0BzFu>(Ol8Xn5_h+xgwK&JrkO%T>PD zI^Liw(y4%7Xe;~Lq!$0J+m4!%w+9slTVRk!Rs%OQ`$*|*<FfRWRY~1H zW#`^ydoK$KN;tLn-jtNV+imTOODY!1ox{4Ia1$ z1L6&@e+#pLn9jeHA%59~jXJ#~qG{FL*~z;Ja4$H3EhK;Ed$V86ee>1rIGGUbz+Cr6 zn4S-BZOfs?s(vd(>RlScyDyL9l3*p3*sTid&TEceI^=Nt682^n4%9K*>IwZpgSs!^H_STJ%ce>9(Ft)SVx}HANg-kc*VXMG6ee zM#aWiE0y`5=&W2oks7ue%k?o_tetUCO(*(5O2+5r#BWLB#ZCe!vKB`cdzFyQF``x( zx=G;risUo^Gqn@L|2O=_?clhC{@;sR!_2J6xRf0fyc&V$&BkTva{AysgQMS2E(PE% zN%C%{do3gL(ZbBABl9eYyJwmkk}2XpN64T-JIQr#QtkO(f>vW{v(ZhkyXzICjfWa*()gZXbDDQK z|Ah=E@Gv96yn6QY#hdB*(eWdW04qWfIBmE#zBvfqI>F_8_4k3by#9?(#a>Ez^nyKy zW{uEz&7c7H-)qgN|4{Nw(Jn5JQeFGYfBZo@H$WZw@G=5*(LAc(k;AOkop*3Muluxm_Xi6qE2@su| z|7tTGyVr>joB$7La@i@4O|oBo->*4hO0=>EXHh9Kqm5{8v5^nG-Vw{=ZkL7G?G6T{ zGR>tHu`H=9EV%BN7%)NmU;G&OR@;BQdwcmomDnkR`-^2ktG&(lc_EZr1C%p{&58C| z8mvuYAD&4^0&V5kaB@FSYGP&{?HEKQL?sBBo99htE*!LZxmWPfZN1IM-Q8VX_mOgh zz)D`t{j2dErC(N>T_QvsF9~KFrS-J`GIi(~W3|v_Y*%xesY9OZUx#@|vnQ zxm-9+$ak;y@ihSCQQ;HUI|0k&-Xwpm>lFRyDPylolgz7=eVv4C7M4(!CYHM5a{?0J zOk(%2Q8R-je%jZ|13ibT8CqQAoWB<(J$IFj_xW(A?^i1?FOrj~Wzo*8 zy26T;gu%r8ER-kvMUOXLUE~d0g=_C{UY5#~;U8|<<986Ao7kndG04Ct@B5oi;W~Ae z9DX?2aMwMxJK|hmZleC#c8!71;491>zQ)yU`#TXNsnz%W2-!xje za$B&#IZrbTg-iAH3D-CG=RTA!q3P1P9k1*DfoS4h_!IoYuu7A9;KPfjR?GDzM|Be& zg0F%L*wI26t9IXNM?1|4@Xquf?$z42KWn@892vXNtH21~4s0)4QYhXEHcF7^$S9>6N?^?NG{<&=Q?om^DMj zl+&i4jMZ|-%?BZA7zA_mqKN2+d=9%KG|J~-a-qSS5Yu$~HXvf#y%6&wZQH%L_$SW; zH$%hepIFItLGr;HmP9|O0ab{nPPdLVvWd}19oQsmg;`Am?>^u-`Dn;-YRma8ux}RY zPYS)^|F#}DV`%qxGEX6sI|2^L;PP=rOQQ9doSN$13RB*j{6fd+{&(`kB^^v)iLs_} z2<>8K1>vtFj;wh^AvAPY)GdK|1I8nHKHOym`h=4U)h253VtzDvWv`C}ck#+jl%b^| zEL~y2hwaJvV#M@;q<7TLCysZR(T_~?u7 ztG|C&%?PVc9c62Kf1IC0;APBCb(@5d4uqiNwUEOv4YPW>`@ZD00V{6!%G)}zhhz#q zw7=1ew@Xzx5Vx@*8>g1_K3Rf#UrkB-{F#z&zFqgzX|UH(ZoS|>GeV%-N<}sLDfAH| z+3GYntrMq-p3>0fH!KHz6@g@WuV`-u=Ulex>%V6Sz(TS7^KFl0Q)Y8hxTY44rrw|W)D~+^&qDXKp56Ux~*&^b~mDm?H~E(NX&4T%dc7b80ARp;41X+e$W zfd2rh0_vLy;WtOpk2UbiX3jh|{qoHiX|QrXFUjg2H&+cF3!~|ytm5NToooe&5Z{?? 
zBEgQ_4e)TH+HL>s_HHwER&yoPbu5J;;Wwy1Zl@Y5kWWxfhJhmDx)xujH(w{8QaR)a zb&T;kd~oKgM%Ko~VLK^=<-jlZaYkS-0OdCL;JARas;Ivgn2W4AFMcy!@ z7^2QM9DK^69tP$c9gK|$#~Lpb%GwY?D7R&*OyQqaHgQ%f*!9)^Q&L)@W)~U#Jd#r) zFrgvEUEv8KKcy2bQ#2<0|LuIWsW&jYOj@#!Yg%{X{_h@NqNN)>1vlPuR?IAydZai( z#nbws)q_xe=lx$rotFY#uHyMBSa746DxPnjeLk>RuC4CzW5JEr4l88(Z`4|&;(1Heb5aS@ z_G*<|_r5DX+AGHfar?T5pWGhU^t;@ylC{uM3aivmdFxr$I#tCp|M<~}6U*oR-MBCa zSU68o*}Ao(LefIs!$vCi`LB*02M;R%t2WO`znpjM5#4b|`C?cdPt1$N30tn$iC1u_ zcwV|aRpq3>1HK0({ciKCr7R4U;ywpJ9Q;pAk6$HMbN0iFFO1hS6e~ZnSXS~jsY%7(2JL|!BrANQzxWJ)h za^uj`ln1vD$n+o6+pWw!Rliz8WvlS+LqI3&RnmTC%)f)}#yi2Anx`B8iwLGmX{l~~ zTD=EsX0GR?Gc0zjf2O%Fc9(8E&sZ-jC~YGHtZOEzcwTL&l=&c;FwuQ+f_8!Ohm#!@ z2M;TP19!{ghMfXCWM){O;6G~hAnL)Zj)-?tp;Dh2Kg%Q-Uf|@BciFv9<_Fh@?}8tf zg*SoI&ZXP@GuS8e?f=t%z$xJc!=1C&8}b>=8OmK-ifcjTHqes`Cs^|RU?`9?*gwC% z?x*O7^#{xwZ!0HtrvvXW0LFIa`?fQjkvnYUEch(gZtU+h|5^IL?}7V)ex*mA=NEx3 z394p2t9U>#@e3B_>?^BAT*19m)HipwCjd_8<%r+fiNLB`qh<&3|-e2`!& zVVv#peBzxcwq^{gfz2oHo9B-`76&F%WqnUzNQ@$~LLgJ%=l@x{SEVZ^PMHh5n3uuR L)z4*}Q$iB}WHa&O literal 0 HcmV?d00001 diff --git a/augur/static/img/augur_logo_black.png b/augur/static/img/augur_logo_black.png new file mode 100644 index 0000000000000000000000000000000000000000..5256f232e6a01e630a43e9a002ec94e6de915b26 GIT binary patch literal 42763 zcmeGENK;-gWOizyINU%1XXH``LSD_MTY@*Ln*iCSV}I!onhc^-|>>78Xt%78bVJBfR@x z$p2vZ+<#!ZzXK{^6%8_NVPPp^y;6Dc-UoX(;BmFp2ha7Q2NCp%T=WmMhq)sNgu9?a@w^Y^dbymnYqupOxG5KSzWuerC^p zql&(qoVi=Ln)63#eJEa#*=T^Y$@?EvBAbvicfM72#nR^s@!yi@$Z#waiNde}Pw?75 z|L@EHYVf~4_}?u24-fu_iT~q;|Ir~{d-I%7iG$NzH~d~2$k~_&^f?>t%&s-wbb|?v zoUH@KzPeHxoeYJ45C^tlGSIVyDCtqi$DISiyEOEH5u~~H|LBWmcAL-movMu*j+#V# zc6f)<3nQ0BNbf$5^*x5mi@=8L{A&dI&gP~ZDOg^lH1<-9^8eE&%@&sCjigEND1|*B zQ9mtCAOElb@;qsn)k#>^VAsEHfZ`gs1F^dC+p+sUvkMQ#rjCQJs@XBZL!rcuT>eIo z3N&zy+?^_Bm_IZ$FYeCbMgv(~Uc~v|`bWqG@4Y_BHQw`^7U2@8Rbvdg%K*64-IzBp zSuUUFI9+D-Y1XkyW7fGTV8*F#58TcqTp%&)P+oDdo@|8bUr)YbD5K^Tf7utuuh|#B z0{z_(kG#%61)R@soy|I_SIr0d*PaZ@j7>t$6l+)4*S3tRoRb2-BHo-AgNNmVKPAWn zrj2SAzun^8L6v&9Ed;vP75h6J6+_mVof1ZwpW@8wBe|c< zMk~2>81Gs4@mIa6+po2sv5Get-LE^aUMXr^aIBhln0vNY(iUyXRAfkvYO<^Z_3EgD zeh$?MhYcWun-yU(wiGecGx1J-Pa4*G{YPnwniOP2-*_^)7c>-CY<#E9Ih}yK-olTU zls9Vxtw73dSqrt?|64#pmxMA}-6YRnCz1qVgF%Cog$g~Ffk^E)^5d6=RfUL|=X;ft zrdvy-kZSKiKvs#$2|L+$V={{faYM3D(OKZcRA~zurldW@S*K{SHRgcm)zw@%-a~h@oeyI7*Lkou>p-Vy;Y!ANp`f(8Ulo6)=MA0OGI7?|ip4J$7ZE=*$^5nnMhv>z;1 zu@8#t7t>Cm&a*&vb@ET|`rREM?2qw%l)XNy-<9oe(K-nv$(DjQvR&bUV`UjLIY z>i-PMjg4Z6)!o5Y;S>no;hI#F6|?G7nMPZiC{w|m;-*G9&$-!%?trlsmUBr#PFCj? 
z@v{yo+j(}3~UH5gI@ifFRSO5Gzw^sR3Hc=Z9-or6PT{@%3ORdZ2Hmusl{JvutJCPYoI(IO==+Jw1g)auG|Ao%JN4%ZFU5M~~|Byi$xR z8)3Jr@q2x~;~B2VlGN946;6j{3VDlbJmB-jn6<1py6Ch0NsnW$eGS@MN!lpBMLtz^OGDGTr=UlFF9q|3mou8nWU%=ezRO z{4p{E_by&fDO3;HtDl4?tba(p+&W~_m)oz~Pf|@dl zEf@uG2#if{!d-n+CSUTIQ^9nDqo{(vM`(A~n+L(IrFmkI#n4fEL6gLV8;z@X%rrlk zWNe`P~l*-I<;Ny?C&sWV4 zI#Qy(9~KrAUVP3bIo5Yym1xyX9`-11*W(ekA~#Mq)RnUa1u4Z`eL2NJ`3f~GD}fhA zerc8Dd%P`swr*U7Yr*R+cewwwQ@7LiLRY`M?h5&10rKv(_s!_%1=Ej{r0X?!{|AQC z02!@iHWDgZ{oPmuufRQnOURg#dvf5|DR*LYt}msH)%5Mv8EwI5O(7}c`j$L9_L9nK zj5_#{mZR2FPr}MSgmp<;Q%U3YyfoRB`2~%fUB!Nwg6c3b*KAWlxsyMVokS`P4`gNz zVXbQlCO@O^rf#+Z{Y$2J9)bGvB1~NvSmftO)uwR9qsQw?WsdU5(R6Hg zyXn793u9F4e+(je5{(P8UF}cc54>o1d=5kce{;($ybCDyyng$m&P@U_8dG>Td>K=K zvN5?QAZUn0ck_6SVB5VW<6ne(e~*x=r!9*C2*nOc2z{!7XtC%IO9%2k1f_ z-GSy8-sOl><4E@Y4u>`ZN~)d(vr}}ek|V3ijceAafIj{)BAvG$6~p>WCr+*x_{@@wM(0{_Q>)L^ez%3ZzObdlNM zNhR}Ppv`sUo(*%y(@QVpWJTte+iuJ!MK>k@){VnSq zNflG`8`&LHN!s1de(QhAjOZ)&^>Zni_rJauvVsEecmB>g2{%_2Lgp_w3e7B-Q5MD4 z_iQ~L^MaLbYWkZZTTgVJYPl0>z^3vTIgm*^qNm|(rf>V(QB`GDvl9FP*+%FQD})B- zlpt7GOr~-39^ha<@bJo2*1a1$sl^dW|8s4QEmT9l;-V{1X+ziR)4Ba$2NIv2$AmiV z(mAwg=jNAFq;JG8eR~jxmS9$XBF8q@U@LM12lu!7v?eD{%h3n9X-}x8vQe4s|+x zwOp+cZvB=l$FcB0JkY-F;@7p&ypOd+MhK#FthY&_@rFG;FX@zBe=w`iMjRt0GjQhm`AY8=vR7~C+-fp`7pHrb zkq{xJuNRO)h$pcx!>B#I(fE$k!iHv_=o?V(mmq3em$l^uybrWWgeovbcD6qTW(-`^7&EUo%`6~>i_{maK2&I}R z&FjJxVTo?qUc)*X)C|H+rRQ0ES44HJ$BJgSMEN`a>0j`e2Nf21Cw&$_HJLa>F1=Xw zBM+5Od$-OP1Ak6`#pHVQ&2yg;wOI_OKUE5r6;etE(zb~A8O~bZbL`-zZ#1r_$j`e@ zA^gIc&e;5Q4|V&81 zZ&&7=+?*MXRT={vtMlv)GYa5xay==gl>$t~xkGIAaSE3He&wF^xPhKCv8H`f@uoxN zO?Ap(Il;KAy1(*%p@tr>81}&qBZ4U^OetOHdO1CyqUH?6mKWpKSCQW!AGt!Z1 za8|*W2Zu9F7-86NKLVUhDS|At1L^Ya;Bp)AGKbgUsh__W zjcU#(4#y`Jrh^o|&W+IivRMHM8D0{TC^-5_n~BE)re{Fspy!9o`rlpdW#tE`t2B%B z0x;yY_chTs<$~;zdMfx=M0TN>^NoTL%2s8v>e<35{PwP(Y2?y4z-!$29tc}Rgnms5 z%a?})rg>5&V7dhV12geZM}DRS+|Q8r3oe1mnvsaQS9dd)I|WS>mvx0Gn*eQz`^E3_ zY;b2?i&=J|w!voP?n_5}fqn;|E_6usIDGXhkR=gOVs;0Z(U){}u>X#*Rr)kmv;S05_eO;%HVZ=Kd;kdreMLN(c;%ma#HAB!Uv>&62t1tbn@!8q)Iz-5`Mvsjq14szkK!I3dR=&cL|M97&0VEq;Y@kPp zqy7`h))aSy@b>`IP~xuB7XA)g6`yfJm(miFzH;rZ&DEwfaRP}*!~mLiLLKVU0AJNk9qL1Wzn71)S-8!pZ2*{wwE z2~3^MqT+>e6`?z%)TV}lVJuNzfIJA5Gdld^B+0GGyK@Two8wNtt1NgWH9s!7Z3!YO z5APOl#J7C44!{|_ZWFX>2?`jx?X}waaQj3INZ8!llGPCf zCuU8PCd+$9<$H!l(`SFdUH55|)}Sjb(EL9vTVsQD@TOI#1(obS>ArtNzFtbI`ECPj zS*T1M$IqBF6bc0S2O;lueu3l{ zw3esd{)9Y$RK8!JDE5d5KiM$qW0^&mJ$xUT^^Dg))N;b5T8%z_aw24^5cwUr-EidA4 zDy*kcAWoF>8Ym~4BhC&!pNCb59&w&M&yTNBZ5zusK^X4FGek7boQ2&`*}x*J!(hd& zApDc%44f|E3sg=cdZLXH#AB$aJ-3{vu4kxBm!4Q|;4c+&_zPbmp-)VRPc8s|pUxW*J|GYzivwX_bZp2V$P= z2lu@07wkmjIBr`aQP#|Nbh(pd|Dob<*!A{XK}gu?MrV5AS{gpK9!M%|R`lT2vS6!? 
[GIT binary patch data (base85-encoded image blob) omitted; not human-readable]

literal 0
HcmV?d00001

diff --git a/augur/static/img/notification-icon.svg b/augur/static/img/notification-icon.svg
new file mode 100644
index 0000000000..10946c98b3
--- /dev/null
+++ b/augur/static/img/notification-icon.svg
@@ -0,0 +1,80 @@
[80 lines of SVG markup were stripped from this copy of the patch and are not recoverable]

diff --git a/augur/static/js/range.js b/augur/static/js/range.js
new file mode 100644
index 0000000000..029e803104
--- /dev/null
+++ b/augur/static/js/range.js
@@ -0,0 +1,3 @@
+function range(size, startAt = 0) {
+    return [...Array(size).keys()].map(i => i + startAt);
+}

diff --git a/augur/static/js/sleep.js b/augur/static/js/sleep.js
new file mode 100644
index 0000000000..535240241d
--- /dev/null
+++ b/augur/static/js/sleep.js
@@ -0,0 +1,4 @@
+async function sleep(timeout) {
+    // sleep for timeout milliseconds
+    await new Promise(resolve => setTimeout(resolve, timeout));
+}

diff --git a/augur/static/js/textarea_resize.js b/augur/static/js/textarea_resize.js
new file mode 100644
index 0000000000..624d004f40
--- /dev/null
+++ b/augur/static/js/textarea_resize.js
@@ -0,0 +1,12 @@
+// Create auto-resizing for any textareas in the document
+const tx = document.getElementsByTagName("textarea");
+
+for (let i = 0; i < tx.length; i++) {
+    tx[i].setAttribute("style", "height:" + (tx[i].scrollHeight) + "px;overflow-y:hidden;");
+    tx[i].addEventListener("input", OnTextAreaInput, false);
+}
+
+function OnTextAreaInput() {
+    this.style.height = "auto";
+    this.style.height = (this.scrollHeight) + "px";
+}

diff --git a/augur/tasks/github/util/github_api_key_handler.py b/augur/tasks/github/util/github_api_key_handler.py
index c357b378b9..6bba5764cf 100644
--- a/augur/tasks/github/util/github_api_key_handler.py
+++ b/augur/tasks/github/util/github_api_key_handler.py
@@ -1,4 +1,5 @@
 import httpx
+import time
 
 from typing import Optional, List
 
@@ -75,8 +76,16 @@ def get_api_keys(self) -> List[str]:
         if redis_keys:
             return redis_keys
 
-        keys = self.get_api_keys_from_database()
-
+        attempts = 0
+        while attempts < 3:
+
+            try:
+                keys = self.get_api_keys_from_database()
+                break
+            except:
+                time.sleep(5)
+                attempts += 1
+
         if self.config_key is not None:
             keys += [self.config_key]
 
@@ -91,6 +100,8 @@
if it returns "Bad Credentials" if self.is_bad_api_key(client, key) is False: valid_keys.append(key) + else: + print(f"WARNING: The key '{key}' is not a valid key. Hint: If valid in past it may have expired") # just in case the mulitprocessing adds extra values to the list. # we are clearing it before we push the values we got diff --git a/augur/tasks/github/util/github_random_key_auth.py b/augur/tasks/github/util/github_random_key_auth.py index 3051cbd123..3a1e8bec00 100644 --- a/augur/tasks/github/util/github_random_key_auth.py +++ b/augur/tasks/github/util/github_random_key_auth.py @@ -14,18 +14,12 @@ class GithubRandomKeyAuth(RandomKeyAuth): def __init__(self, session: DatabaseSession): """Creates a GithubRandomKeyAuth object and initializes the RandomKeyAuth parent class""" - attempts = 0 - while attempts <= 3: - - # gets the github api keys from the database via the GithubApiKeyHandler - github_api_keys = GithubApiKeyHandler(session).keys - - if github_api_keys: - break - - print("Failed to get github api keys trying up to 3 times") - attempts += 1 + + # gets the github api keys from the database via the GithubApiKeyHandler + github_api_keys = GithubApiKeyHandler(session).keys + if not github_api_keys: + print("Failed to find github api keys. This is usually because your key has expired") # defines the structure of the github api key header_name = "Authorization" diff --git a/augur/templates/admin-dashboard.j2 b/augur/templates/admin-dashboard.j2 new file mode 100644 index 0000000000..a24829c99f --- /dev/null +++ b/augur/templates/admin-dashboard.j2 @@ -0,0 +1,178 @@ + + + + + + + + + + + + + + + + + + + + + Dasboard - Augur View + + + + + +

+
+
+
+ Dashboard +
+
+ +
+ +
+ {# Start dashboard content #} +
+

Stats

+ {# Start content card #} +
+
+ {# Start form body #} +
+ {% for section in sections %} +
+
+
{{ section.title }}
+
+ {% for setting in section.settings %} +
+
+ + +
{{ setting.description or "No description available" }}
+
+
+ {% endfor %} +
+ {% endfor %} + {#
+
+ +
+
#} +
+
+
+

User Accounts

+ {# Start content card #} +
+
+
+ {% for section in sections %} +
+
+
{{ section.title }}
+
+ {% for setting in section.settings %} +
+
+ + +
{{ setting.description or "No description available" }}
+
+
+ {% endfor %} +
+ {% endfor %} + {#
+
+ +
+
#} +
+
+
+

Configuration

+ {# Start content card #} +
+
+
+ {% for section in config.items() %} +
+
+
{{ section[0] }}
+
+ {% for setting in section[1].items() %} +
+
+ + +
No description available
+
+
+ {% endfor %} +
+ {% endfor %} +
+
+ +
+
+
+
+
+
+
+
+ + + + diff --git a/augur/templates/authorization.j2 b/augur/templates/authorization.j2 new file mode 100644 index 0000000000..d792fa3d69 --- /dev/null +++ b/augur/templates/authorization.j2 @@ -0,0 +1,52 @@ +

Authorize App

+ +

{{ app.name }} is requesting access to your account.

+

Authorizing this application will grant it access to the following:

+
    +
  • Username
  • +
  • Your repo groups
  • +
  • Information collected by Augur, both public and private:
  • +
  • +
      +
    • Issues
    • +
    • Pull requests
    • +
    • Comments
    • +
    • Commit logs
    • +
    +
  • +
+ +

By continuing, you authorize this access, and will be redirected to the following link:

+

{{ app.redirect_url }}

+Make sure you trust the application and this link before proceeding. + +
+ + +
+
+ +
+
+ + \ No newline at end of file diff --git a/augur/templates/first-time.j2 b/augur/templates/first-time.j2 new file mode 100644 index 0000000000..c8eb284da8 --- /dev/null +++ b/augur/templates/first-time.j2 @@ -0,0 +1,211 @@ +{# https://www.bootdey.com/snippets/view/dark-profile-settings #} + + + + + + + + + + + + + + +
+
+ {# Start sidebar #} +
+
+
+ +
+ +
+
+ {# Start form body #} +
+
+
+
+ {% for section in sections %} +
+
+
{{ section.title }}
+
+ {% for setting in section.settings %} +
+
+ + +
{{ setting.description }}
+
+
+ {% endfor %} +
+ {% endfor %} +
+
+
Gunicorn Settings
+
+
+
+
{{ gunicorn_placeholder }}
+
+
+
+
+
+ +
+
+
+
+
+
+
+
+ + + + + + + diff --git a/augur/templates/groups-table.j2 b/augur/templates/groups-table.j2 new file mode 100644 index 0000000000..ccc0e2a3f3 --- /dev/null +++ b/augur/templates/groups-table.j2 @@ -0,0 +1,27 @@ +{#% if groups %} +
+ + + + + + + + + + {% for group in groups %} + + + + + + + {% endfor %} + +
#Group NameData Collectio
{{loop.index}}{{ group.name }}{{ group.data_collection_date }}TODO
+
+{% elif query_key %} +

Your search did not match any results

+{% else %} +

Unable to load group information

+{% endif %#} diff --git a/augur/templates/index.j2 b/augur/templates/index.j2 new file mode 100644 index 0000000000..89cd6734c3 --- /dev/null +++ b/augur/templates/index.j2 @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + {% if title %} + {{title}} - Augur View + {% else %} + Augur View + {% endif %} + + {% if redirect %} + + {% endif %} + + + + + + + {% include 'notifications.j2' %} + + {% include 'navbar.j2' %} + +
+ {% if invalid %} +

Invalid API URL

+

The API URL [{{ api_url or 'unspecified'}}] is invalid

+ {% elif body %} + {% include '%s.j2' % body ignore missing %} + {% else %} +

404 - Page Not Found

+

The page you were looking for isn't here. Try clicking one of the navigation links above

+ {% endif %} +
+ + + + diff --git a/augur/templates/loading.j2 b/augur/templates/loading.j2 new file mode 100644 index 0000000000..052af79eab --- /dev/null +++ b/augur/templates/loading.j2 @@ -0,0 +1,14 @@ +{% if not d %} +

Uh oh, something went wrong!

+

You were sent to this page because we were loading something for you, but we didn't catch your destination.

+

Go back to the previous page and try again. If that doesn't help, submit an issue to https://github.com/chaoss/augur .

+{% else %} + +

Give us a moment!

+

We are retrieving some data for you, and it may take up to a few seconds to load.

+

If you aren't redirected in a few seconds, go back to the previous page and try again.

+ +

Redirecting to: {{url_for('root', path=d)}}

+{% endif %} diff --git a/augur/templates/login.j2 b/augur/templates/login.j2 new file mode 100644 index 0000000000..c71d02d50f --- /dev/null +++ b/augur/templates/login.j2 @@ -0,0 +1,155 @@ +
+
+ +
+
+ + + + \ No newline at end of file diff --git a/augur/templates/navbar.j2 b/augur/templates/navbar.j2 new file mode 100644 index 0000000000..fe498548a9 --- /dev/null +++ b/augur/templates/navbar.j2 @@ -0,0 +1,67 @@ + diff --git a/augur/templates/new_settings.j2 b/augur/templates/new_settings.j2 new file mode 100644 index 0000000000..74a14ed575 --- /dev/null +++ b/augur/templates/new_settings.j2 @@ -0,0 +1,347 @@ + + + + + + + + + + + + + + + + + + + + + + Settings - Augur View + + + + + + {% include 'notifications.j2' %} +
+
+
+
+ Settings +
+
+ +
+ +
+ {# Start dashboard content #} +
+
+

Profile

+ {# Start content card #} +
+
+ {# Start form body #} +
+
+
+
+

{{ current_user.id }}

+ Delete Account +
+ +
+
+
+

Update Password

+ +
+
+
+
+
+ + +
+
+
+ + + + + + + \ No newline at end of file diff --git a/augur/templates/notice.j2 b/augur/templates/notice.j2 new file mode 100644 index 0000000000..46ed7ead66 --- /dev/null +++ b/augur/templates/notice.j2 @@ -0,0 +1,6 @@ +{% if messageTitle %} +

{{messageTitle}}

+{% endif %} +{% if messageBody %} +

{{messageBody}}

+{% endif %} diff --git a/augur/templates/notifications.j2 b/augur/templates/notifications.j2 new file mode 100644 index 0000000000..b59c673391 --- /dev/null +++ b/augur/templates/notifications.j2 @@ -0,0 +1,79 @@ +{% with messages = get_flashed_messages() %} + +
+ {% if messages %} + {% for message in messages %} + + {% endfor %} + {% endif %} +
+ + + + +{% endwith %} + diff --git a/augur/templates/repo-commits.j2 b/augur/templates/repo-commits.j2 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/augur/templates/repo-info.j2 b/augur/templates/repo-info.j2 new file mode 100644 index 0000000000..311daa45f7 --- /dev/null +++ b/augur/templates/repo-info.j2 @@ -0,0 +1,128 @@ + + +
+
+ {% if repo.repo_id %} +

Report for: {{ repo.repo_name|title }}

+

{{ repo.repo_git }}

+ {% for report in reports %} +

{{ report|replace("_", " ")|title }}

+ {% for image in images[report] %} +
+
+
+
+
+ +
+
+ {% endfor %} + {% endfor %} + {% else %} +

Repository {{ repo_id }} not found

+ {% endif %} +

+
+{% if repo.repo_id %} +{# Wait for cache response: + This method queries the server from the client, asking for confirmation + of which images are available on the server. The server will asynchronously + download the requested images as the page is loading, then once the page + loads, the client will query a locking endpoint on the server and wait + for a response. +#} + +{% endif %} + + + + + + diff --git a/augur/templates/repos-card.j2 b/augur/templates/repos-card.j2 new file mode 100644 index 0000000000..04e4ed3871 --- /dev/null +++ b/augur/templates/repos-card.j2 @@ -0,0 +1,30 @@ +{% if repos %} +
+
+ {% for repo in repos %} +
+
+
+
+
{{ repo.repo_name }}
+

Repository Status: {{ repo.repo_status }}

+

All Time Commits: {{ repo.commits_all_time|int }}

+

All Time Issues: {{ repo.issues_all_time|int }}

+
+ +
+
+
+ {% endfor %} +
+
+{% elif query_key %} +

Your search did not match any repositories

+{% elif current_user.is_authenticated %} +

No Repos Tracked

+

Add repos to your personal tracker in your profile page

+{% else %} +

Unable to load repository information

+{% endif %} diff --git a/augur/templates/repos-table.j2 b/augur/templates/repos-table.j2 new file mode 100644 index 0000000000..9c08224bde --- /dev/null +++ b/augur/templates/repos-table.j2 @@ -0,0 +1,95 @@ +{% if repos %} + + + +{# Create the header row for the repo table: + Here we dynamically generate the header row by defining a dictionary list + which contains the titles of each column, accompanied by an optional "key" + item. If a column definition contains a "key" item, that column is assumed + to be sortable, sorting links for that data are generated using the given + key. It is done this way because the client does not receive the full data + each time they load the page, and instead the server sorts the full data. +#} +{# "title" : "Group", "key" : "rg_name"}, #} +{%- set tableHeaders = + [{"title" : "#"}, + {"title" : "Repo Name", "key" : "repo_name"}, + {"title" : "Reports"}, + {"title" : "Commits", "key" : "commits_all_time"}, + {"title" : "Issues", "key" : "issues_all_time"}, + {"title" : "Change Requests"}] -%} +
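{# Editor's note: the header-row markup that consumes the tableHeaders list above was lost
   when this copy of the patch was extracted, so the commented block below is only a hedged,
   illustrative sketch of how such a header row is typically rendered from that list: a plain
   cell for unsortable columns, and a link built from url_for(PS, ...) plus a sort arrow for
   sortable ones. It is not the author's original markup; the element names are assumptions. #}
{#
<thead>
  <tr>
    {%- for header in tableHeaders -%}
      {% if header.key %}
        {%- if sorting == header.key -%}
          {%- set sorting_link = url_for(PS, q=query_key, p=activePage, s=header.key, r= not reverse) -%}
        {%- else -%}
          {%- set sorting_link = url_for(PS, q=query_key, p=activePage, s=header.key) -%}
        {%- endif -%}
        <th>
          <a href="{{ sorting_link }}">{{ header.title }}</a>
          {%- if sorting == header.key and reverse %} ▲ {% elif sorting == header.key %} ▼ {% endif %}
        </th>
      {% else -%}
        <th>{{ header.title }}</th>
      {% endif %}
    {%- endfor -%}
  </tr>
</thead>
#}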
+ + + + + {%- for header in tableHeaders -%} + {% if header.key %} + {%- if sorting == header.key -%} + {%- set sorting_link = url_for(PS, q=query_key, p=activePage, s=header.key, r= not reverse) -%} + {%- else -%} + {%- set sorting_link = url_for(PS, q=query_key, p=activePage, s=header.key) -%} + {%- endif -%} + + {% else -%} + + {% endif %} {%- endfor -%} + + + + + {% for repo in repos %} + + + + {# #} + + + + + + {% endfor %} + +
{{ header.title }} + {%- if sorting == header.key and reverse %} ▲ {% elif sorting == header.key %} ▼ {% endif %}{{ header.title }}
{{loop.index + (activePage) * offset}}{{ repo.repo_name }}{{ repo.rg_name }}TODO{{ repo.commits_all_time|int }}{{ repo.issues_all_time|int }}TODO
+
+ +
+ +{% elif query_key %} +

Your search did not match any repositories

+{% elif current_user.is_authenticated %} +

No Repos Tracked

+

Add repos to your personal tracker in your profile page

+{% elif activePage != 0 %} +

Invalid Page

+Click here to go back +{% else %} +

Unable to load repository information

+{% endif %} diff --git a/augur/templates/settings.j2 b/augur/templates/settings.j2 new file mode 100644 index 0000000000..80114039a9 --- /dev/null +++ b/augur/templates/settings.j2 @@ -0,0 +1,439 @@ + + + + + + + + + + + + + + + + + + + + + + + Settings - Augur View + + + + + + {% include 'notifications.j2' %} +
+
+
+
+ Settings +
+
+ +
+ +
+ {# Start dashboard content #} +
+
+

Profile

+ {# Start content card #} +
+
+ {# Start form body #} +
+
+
+
+

{{ current_user.id }}

+ Delete Account +
+ +
+
+
+

Update Password

+ +
+
+
+
+
+ + +
+
+
+ + + + + + + + \ No newline at end of file diff --git a/augur/templates/settings_old.j2 b/augur/templates/settings_old.j2 new file mode 100644 index 0000000000..f62a066ab7 --- /dev/null +++ b/augur/templates/settings_old.j2 @@ -0,0 +1,140 @@ +
+
+
+
+
+

{{ current_user.id }}

+ Delete Account +
+ +
+
+
+

Update Password

+ +
+
+
+
+
+

Your Repo Groups

+ {%- set groups = current_user.get_groups()["groups"] -%} + {% if groups %} + {% for group in groups %} + {%- set tableHeaders = + [{"title" : "Group ID"}, + {"title" : "Group Name"}] + -%} +
+ + + + + {%- for header in tableHeaders -%} + + {%- endfor -%} + + + + + {% for repo in repos %} + + + + + {% endfor %} + +
{{ header.title }}
{{ group.group_id }}{{ group.name }}
+
+ {% endfor %} + {% else %} +

No groups created

+ {% endif %} +
+
+
+

Add Repos

+ +
+
+
+ + + \ No newline at end of file diff --git a/augur/templates/status.j2 b/augur/templates/status.j2 new file mode 100644 index 0000000000..02b62ed950 --- /dev/null +++ b/augur/templates/status.j2 @@ -0,0 +1,233 @@ + + + +

Collection Status

+
+
+
+

Pull Requests

+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+

Issues

+
+
+
+
+
+
+
+

Time between most recently collected issue and last collection run

+
+
+
+
+
+
+
+

Commits

+
+
+
+
+
+
+
+ +
+
+ + diff --git a/augur/templates/toasts.j2 b/augur/templates/toasts.j2 new file mode 100644 index 0000000000..cf707754f2 --- /dev/null +++ b/augur/templates/toasts.j2 @@ -0,0 +1,60 @@ +{% with messages = get_flashed_messages() %} +
+ {% if messages %} + {% for message in messages %} + + {% endfor %} + {% endif %} +
+ +{% endwith %} + + + + diff --git a/augur/templates/user-group-repos-table.j2 b/augur/templates/user-group-repos-table.j2 new file mode 100644 index 0000000000..0b3adefc25 --- /dev/null +++ b/augur/templates/user-group-repos-table.j2 @@ -0,0 +1,113 @@ +{% if repos %} + +

{{ group }}

+ + +{# Create the header row for the repo table: + Here we dynamically generate the header row by defining a dictionary list + which contains the titles of each column, accompanied by an optional "key" + item. If a column definition contains a "key" item, that column is assumed + to be sortable, sorting links for that data are generated using the given + key. It is done this way because the client does not receive the full data + each time they load the page, and instead the server sorts the full data. +#} +{# "title" : "Group", "key" : "rg_name"}, #} +{%- set tableHeaders = + [{"title" : "#"}, + {"title" : "Repo Name", "key" : "repo_name"}, + {"title" : "Reports"}, + {"title" : "Commits", "key" : "commits_all_time"}, + {"title" : "Issues", "key" : "issues_all_time"}, + {"title" : "Change Requests"}, + {"title" : "Remove"}] -%} +
+ + + + + {%- for header in tableHeaders -%} + {% if header.key %} + {%- if sorting == header.key -%} + {%- set sorting_link = url_for(PS, q=query_key, p=activePage, s=header.key, r= not reverse) -%} + {%- else -%} + {%- set sorting_link = url_for(PS, q=query_key, p=activePage, s=header.key) -%} + {%- endif -%} + + {% else -%} + + {% endif %} {%- endfor -%} + + + + + {% for repo in repos %} + + + + {# #} + + + + + + + {% endfor %} + +
{{ header.title }} + {%- if sorting == header.key and reverse %} ▲ {% elif sorting == header.key %} ▼ {% endif %}{{ header.title }}
{{loop.index + (activePage) * offset}}{{ repo.repo_name }}{{ repo.rg_name }}TODO{{ repo.commits_all_time|int }}{{ repo.issues_all_time|int }}TODO
+
+ +
+ +{% elif query_key %} +

Your search did not match any repositories

+{% elif current_user.is_authenticated %} +

No Repos Tracked

+

Add repos to your personal tracker in your profile page

+{% elif activePage != 0 %} +

Invalid Page

+Click here to go back +{% else %} +

No repos in group

+

Add repos to this group in your profile page

+{% endif %} + + diff --git a/augur/util/repo_load_controller.py b/augur/util/repo_load_controller.py index b9e6b1c2c4..9c0317cee6 100644 --- a/augur/util/repo_load_controller.py +++ b/augur/util/repo_load_controller.py @@ -2,27 +2,73 @@ import logging import sqlalchemy as s +import pandas as pd +import base64 from typing import List, Any, Dict - +from augur.application.db.engine import create_database_engine from augur.tasks.github.util.github_paginator import hit_api from augur.tasks.github.util.github_paginator import GithubPaginator from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.application.db.session import DatabaseSession -from augur.application.db.models import Repo, UserRepo, RepoGroup +from augur.application.db.models import Repo, UserRepo, RepoGroup, UserGroup, User from augur.application.db.util import execute_session_query - logger = logging.getLogger(__name__) - REPO_ENDPOINT = "https://api.github.com/repos/{}/{}" ORG_REPOS_ENDPOINT = "https://api.github.com/orgs/{}/repos?per_page=100" DEFAULT_REPO_GROUP_IDS = [1, 10] CLI_USER_ID = 1 +def parse_repo_url(url: str) -> tuple: + """ Gets the owner and repo from a url. + + Args: + url: Github url + + Returns: + Tuple of owner and repo. Or a tuple of None and None if the url is invalid. + """ + + if url.endswith(".github") or url.endswith(".github.io") or url.endswith(".js"): + + result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- _ \.]+)(.git)?\/?$", url) + else: + + result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- _]+)(.git)?\/?$", url) + + if not result: + return None, None + + capturing_groups = result.groups() + + + owner = capturing_groups[0] + repo = capturing_groups[1] + + return owner, repo + +def parse_org_url(url): + """ Gets the owner from a org url. + + Args: + url: Github org url + + Returns: + Org name. Or None if the url is invalid. + """ + + result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/?$", url) + + if not result: + return None + + # if the result is not None then the groups should be valid so we don't worry about index errors here + return result.groups()[0] + class RepoLoadController: @@ -32,7 +78,7 @@ def __init__(self, gh_session): def is_valid_repo(self, url: str) -> bool: """Determine whether repo url is valid. 
- + Args: url: repo_url @@ -40,17 +86,12 @@ def is_valid_repo(self, url: str) -> bool: True if repo url is valid and False if not """ - owner, repo = self.parse_repo_url(url) - if not owner or not repo: - return False + if not self.session.oauths.list_of_keys: + return False, {"status": "No valid github api keys to retrieve data with"} - if repo.endswith(".git"): - # removes .git - repo = repo[:-4] - - if repo.endswith("/"): - # reomves / - repo = repo[:-1] + owner, repo = parse_repo_url(url) + if not owner or not repo: + return False, {"status":"Invalid repo url"} url = REPO_ENDPOINT.format(owner, repo) @@ -65,9 +106,9 @@ def is_valid_repo(self, url: str) -> bool: # if there was an error return False if "message" in result.json().keys(): - return False - - return True + return False, {"status": f"Github Error: {result.json()['message']}"} + + return True, {"status": "Valid repo"} def retrieve_org_repos(self, url: str) -> List[str]: @@ -75,7 +116,7 @@ def retrieve_org_repos(self, url: str) -> List[str]: Note: If the org url is not valid it will return [] - + Args: url: org url @@ -83,19 +124,18 @@ def retrieve_org_repos(self, url: str) -> List[str]: List of valid repo urls or empty list if invalid org """ - owner = self.parse_org_url(url) + owner = parse_org_url(url) if not owner: - return False - - if owner.endswith("/"): - # reomves / - owner = owner[:-1] + return None, {"status": "Invalid owner url"} url = ORG_REPOS_ENDPOINT.format(owner) - + repos = [] with GithubTaskSession(logger) as session: - + + if not session.oauths.list_of_keys: + return None, {"status": "No valid github api keys to retrieve data with"} + for page_data, page in GithubPaginator(url, session.oauths, logger).iter_pages(): if page_data is None: @@ -105,17 +145,27 @@ def retrieve_org_repos(self, url: str) -> List[str]: repo_urls = [repo["html_url"] for repo in repos] - return repo_urls + return repo_urls, {"status": "Invalid owner url"} + + + def is_valid_repo_group_id(self, repo_group_id: int) -> bool: + """Deterime is repo_group_id exists. + Args: + repo_group_id: id from the repo groups table + + Returns: + True if it exists, False if it does not + """ - def is_valid_repo_group_id(self, repo_group_id): query = self.session.query(RepoGroup).filter(RepoGroup.repo_group_id == repo_group_id) - result = execute_session_query(query, 'one') - if result and result.repo_group_id == repo_group_id: - return True + try: + result = execute_session_query(query, 'one') + except (s.orm.exc.NoResultFound, s.orm.exc.MultipleResultsFound): + return False - return False + return True def add_repo_row(self, url: str, repo_group_id: int, tool_source): """Add a repo to the repo table. @@ -128,6 +178,9 @@ def add_repo_row(self, url: str, repo_group_id: int, tool_source): If repo row exists then it will update the repo_group_id if param repo_group_id is not a default. If it does not exist is will simply insert the repo. 
""" + if not isinstance(url, str) or not isinstance(repo_group_id, int) or not isinstance(tool_source, str): + return None + if not self.is_valid_repo_group_id(repo_group_id): return None @@ -148,18 +201,18 @@ def add_repo_row(self, url: str, repo_group_id: int, tool_source): return None if repo_group_id not in DEFAULT_REPO_GROUP_IDS: - # update the repo group id + # update the repo group id query = self.session.query(Repo).filter(Repo.repo_git == url) repo = execute_session_query(query, 'one') if not repo.repo_group_id == repo_group_id: repo.repo_group_id = repo_group_id self.session.commit() - + return result[0]["repo_id"] - def add_repo_to_user(self, repo_id, user_id=1): + def add_repo_to_user_group(self, repo_id: int, group_id:int = 1) -> bool: """Add a repo to a user in the user_repos table. Args: @@ -167,87 +220,225 @@ def add_repo_to_user(self, repo_id, user_id=1): user_id: id of user_id from users table """ - repo_user_data = { - "user_id": user_id, + if not isinstance(repo_id, int) or not isinstance(group_id, int): + return False + + repo_user_group_data = { + "group_id": group_id, "repo_id": repo_id } - - repo_user_unique = ["user_id", "repo_id"] - return_columns = ["user_id", "repo_id"] - data = self.session.insert_data(repo_user_data, UserRepo, repo_user_unique, return_columns) - if data[0]["user_id"] == user_id and data[0]["repo_id"] == repo_id: - return True - return False + repo_user_group_unique = ["group_id", "repo_id"] + return_columns = ["group_id", "repo_id"] + + try: + data = self.session.insert_data(repo_user_group_data, UserRepo, repo_user_group_unique, return_columns) + except s.exc.IntegrityError: + return False + + return data[0]["group_id"] == group_id and data[0]["repo_id"] == repo_id + + def add_user_group(self, user_id:int, group_name:str) -> dict: + """Add a group to the user. + + Args + user_id: id of the user + group_name: name of the group being added + + Returns: + Dict with status key that indicates the success of the operation + + Note: + If group already exists the function will return that it has been added, but a duplicate group isn't added. + It simply detects that it already exists and doesn't add it. + """ + + if not isinstance(user_id, int) or not isinstance(group_name, str): + return False, {"status": "Invalid input"} + + user_group_data = { + "name": group_name, + "user_id": user_id + } + + user_group = self.session.query(UserGroup).filter(UserGroup.user_id == user_id, UserGroup.name == group_name).first() + if user_group: + return False, {"status": "Group already exists"} + + try: + result = self.session.insert_data(user_group_data, UserGroup, ["name", "user_id"], return_columns=["group_id"]) + except s.exc.IntegrityError: + return False, {"status": "Error: User id does not exist"} + + + if result: + return True, {"status": "Group created"} + + + return False, {"status": "Error while creating group"} + + def remove_user_group(self, user_id: int, group_name: str) -> dict: + """ Delete a users group of repos. 
+ + Args: + user_id: id of the user + group_name: name of the users group + + Returns: + Dict with a status key that indicates the result of the operation + + """ + + group = self.session.query(UserGroup).filter(UserGroup.name == group_name, UserGroup.user_id == user_id).first() + if not group: + return False, {"status": "WARNING: Trying to delete group that does not exist"} + + # delete rows from user repos with group_id + for repo in group.repos: + self.session.delete(repo) + + # delete group from user groups table + self.session.delete(group) + + self.session.commit() + + return True, {"status": "Group deleted"} + + + def convert_group_name_to_id(self, user_id: int, group_name: str) -> int: + """Convert a users group name to the database group id. + + Args: + user_id: id of the user + group_name: name of the users group + + Returns: + None on failure. The group id on success. + + """ + + if not isinstance(user_id, int) or not isinstance(group_name, str): + return None + + try: + user_group = self.session.query(UserGroup).filter(UserGroup.user_id == user_id, UserGroup.name == group_name).one() + except s.orm.exc.NoResultFound: + return None - def add_frontend_repo(self, url: List[str], user_id: int, repo_group_id: int = None, valid_repo=False): + return user_group.group_id + + def get_user_groups(self, user_id: int) -> List: + + return self.session.query(UserGroup).filter(UserGroup.user_id == user_id).all() + + def get_user_group_repos(self, group_id: int) -> List: + user_repos = self.session.query(UserRepo).filter(UserRepo.group_id == group_id).all() + + return [user_repo.repo for user_repo in user_repos] + + + def add_frontend_repo(self, url: List[str], user_id: int, group_name=None, group_id=None, valid_repo=False) -> dict: """Add list of repos to a users repos. Args: urls: list of repo urls user_id: id of user_id from users table - repo_group_id: repo_group_id to add the repo to + group_name: name of group to add repo to. 
+ group_id: id of the group + valid_repo: boolean that indicates whether the repo has already been validated Note: - If no repo_group_id is passed the repo will be added to a default repo_group + Either the group_name or group_id can be passed not both + + Returns: + Dict that contains the key "status" and additional useful data """ - if not valid_repo and not self.is_valid_repo(url): - self.session.logger.info(f"Invalid repo: {url}") - return {"status": "Invalid repo", "repo_url": url} + if group_name and group_id: + return False, {"status": "Pass only the group name or group id not both"} - if not repo_group_id: - repo_group_id = DEFAULT_REPO_GROUP_IDS[0] + if group_id is None: - repo_id = self.add_repo_row(url, repo_group_id, "Frontend") + group_id = self.convert_group_name_to_id(user_id, group_name) + if group_id is None: + return False, {"status": "Invalid group name"} + if not valid_repo: + result = self.is_valid_repo(url) + if not result[0]: + return False, {"status": result[1]["status"], "repo_url": url} + + repo_id = self.add_repo_row(url, DEFAULT_REPO_GROUP_IDS[0], "Frontend") if not repo_id: - return {"status": "Repo insertion failed", "repo_url": url} + return False, {"status": "Repo insertion failed", "repo_url": url} - result = self.add_repo_to_user(repo_id, user_id) + result = self.add_repo_to_user_group(repo_id, group_id) if not result: - return {"status": "repo_user insertion failed", "repo_url": url} + return False, {"status": "repo_user insertion failed", "repo_url": url} + + return True, {"status": "Repo Added", "repo_url": url} - return {"status": "Repo Added", "repo_url": url} + def remove_frontend_repo(self, repo_id:int, user_id:int, group_name:str) -> dict: + """ Remove repo from a users group. - + Args: + repo_id: id of the repo to remove + user_id: id of the user + group_name: name of group the repo is being removed from + + Returns: + Dict with a key of status that indicates the result of the operation + """ + + if not isinstance(repo_id, int) or not isinstance(user_id, int) or not isinstance(group_name, str): + return False, {"status": "Invalid types"} + + group_id = self.convert_group_name_to_id(user_id, group_name) + if group_id is None: + return False, {"status": "Invalid group name"} + + # delete rows from user repos with group_id + self.session.query(UserRepo).filter(UserRepo.group_id == group_id, UserRepo.repo_id == repo_id).delete() + self.session.commit() - def add_frontend_org(self, url: List[str], user_id: int): + return True, {"status": "Repo Removed"} + + + def add_frontend_org(self, url: List[str], user_id: int, group_name: int): """Add list of orgs and their repos to a users repos. 
Args: urls: list of org urls user_id: id of user_id from users table """ + group_id = self.convert_group_name_to_id(user_id, group_name) + if group_id is None: + return False, {"status": "Invalid group name"} - repos = self.retrieve_org_repos(url) - - if not repos: - return {"status": "Invalid org", "org_url": url} - - org_name = self.parse_org_url(url) - if not org_name: - return {"status": "Invalid org", "org_url": url} + result = self.retrieve_org_repos(url) + if not result[0]: + return False, result[1] + repos = result[0] # try to get the repo group with this org name # if it does not exist create one failed_repos = [] for repo in repos: - result = self.add_frontend_repo(repo, user_id, valid_repo=True) + result = self.add_frontend_repo(repo, user_id, group_id=group_id, valid_repo=True) # keep track of all the repos that failed - if result["status"] != "Repo Added": + if not result[0]: failed_repos.append(repo) failed_count = len(failed_repos) if failed_count > 0: # this should never happen because an org should never return invalid repos - return {"status": f"{failed_count} repos failed", "repo_urls": failed_repos, "org_url": url} + return False, {"status": f"{failed_count} repos failed", "repo_urls": failed_repos, "org_url": url} - return {"status": "Org repos added", "org_url": url} + return True, {"status": "Org repos added"} def add_cli_repo(self, repo_data: Dict[str, Any], valid_repo=False): """Add list of repos to specified repo_groups @@ -259,7 +450,7 @@ def add_cli_repo(self, repo_data: Dict[str, Any], valid_repo=False): url = repo_data["url"] repo_group_id = repo_data["repo_group_id"] - if valid_repo or self.is_valid_repo(url): + if valid_repo or self.is_valid_repo(url)[0]: # if the repo doesn't exist it adds it # if the repo does exist it updates the repo_group_id @@ -267,9 +458,9 @@ def add_cli_repo(self, repo_data: Dict[str, Any], valid_repo=False): if not repo_id: logger.warning(f"Invalid repo group id specified for {url}, skipping.") - return + return {"status": f"Invalid repo group id specified for {url}, skipping."} - self.add_repo_to_user(repo_id, CLI_USER_ID) + self.add_repo_to_user_group(repo_id) def add_cli_org(self, org_name): """Add list of orgs and their repos to specified repo_groups @@ -280,18 +471,19 @@ def add_cli_org(self, org_name): url = f"https://github.com/{org_name}" repos = self.retrieve_org_repos(url) - + if not repos: print( f"No organization with name {org_name} could be found") - return + return {"status": "No organization found"} # check if the repo group already exists query = self.session.query(RepoGroup).filter(RepoGroup.rg_name == org_name) rg = execute_session_query(query, 'first') if rg: print(f"{rg.rg_name} is already a repo group") - return + + return {"status": "Already a repo group"} print(f'Organization "{org_name}" found') @@ -307,6 +499,8 @@ def add_cli_org(self, org_name): f"Adding {repo_url}") self.add_cli_repo({"url": repo_url, "repo_group_id": repo_group_id}, valid_repo=True) + return {"status": "Org added"} + def get_user_repo_ids(self, user_id: int) -> List[int]: """Retrieve a list of repos_id for the given user_id. 
@@ -318,57 +512,173 @@ def get_user_repo_ids(self, user_id: int) -> List[int]: list of repo ids """ - user_repo_id_query = s.sql.text(f"""SELECT * FROM augur_operations.user_repos WHERE user_id={user_id};""") + user_groups = self.session.query(UserGroup).filter(UserGroup.user_id == user_id).all() + + all_repo_ids = set() + for group in user_groups: + + repo_ids = [user_repo.repo.repo_id for user_repo in group.repos] + all_repo_ids.update(repo_ids) + + + return list(all_repo_ids) + + + def paginate_repos(self, source, page=0, page_size=25, sort="repo_id", direction="ASC", **kwargs): + + if not source: + print("Func: paginate_repos. Error: Source Required") + return None, {"status": "Source Required"} + + if source not in ["all", "user", "group"]: + print("Func: paginate_repos. Error: Invalid source") + return None, {"Invalid source"} + + if direction and direction != "ASC" and direction != "DESC": + print("Func: paginate_repos. Error: Invalid direction") + return None, {"status": "Invalid direction"} + + try: + page = int(page) if page else 0 + page_size = int(page_size) if page else 25 + except TypeError: + print("Func: paginate_repos. Error: Page size and page should be integers") + return None, {"status": "Page size and page should be integers"} + + if page < 0 or page_size < 0: + print("Func: paginate_repos. Error: Page size and page should be positive") + return None, {"status": "Page size and page should be postive"} + order_by = sort if sort else "repo_id" + order_direction = direction if direction else "ASC" - result = self.session.execute_sql(user_repo_id_query).fetchall() + query = self.generate_repo_query(source, count=False, order_by=order_by, direction=order_direction, + page=page, page_size=page_size, **kwargs) + if not query[0]: + return None, {"status": query[1]["status"]} - if len(result) == 0: - return [] + if query[1]["status"] == "No data": + return [], {"status": "No data"} - repo_ids = [dict(row)["repo_id"] for row in result] + get_page_of_repos_sql = s.sql.text(query[0]) - return repo_ids + results = pd.read_sql(get_page_of_repos_sql, create_database_engine()) + results['url'] = results['url'].apply(lambda datum: datum.split('//')[1]) + b64_urls = [] + for i in results.index: + b64_urls.append(base64.b64encode((results.at[i, 'url']).encode())) + results['base64_url'] = b64_urls + data = results.to_dict(orient="records") - def parse_repo_url(self, url): + for row in data: + row["repo_name"] = re.search(r"github\.com\/[A-Za-z0-9 \- _]+\/([A-Za-z0-9 \- _ .]+)$", row["url"]).groups()[0] - if url.endswith(".github") or url.endswith(".github.io"): - - result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- _ \.]+)(.git)?\/?$", url) + return data, {"status": "success"} + + def get_repo_count(self, source, **kwargs): + + if not source: + print("Func: get_repo_count. Error: Source Required") + return None, {"status": "Source Required"} + + if source not in ["all", "user", "group"]: + print("Func: get_repo_count. 
Error: Invalid source") + return None, {"status": "Invalid source"} + + user = kwargs.get("user") + group_name = kwargs.get("group_name") + + query = self.generate_repo_query(source, count=True, user=user, group_name=group_name) + if not query[0]: + return None, query[1] + + if query[1]["status"] == "No data": + return 0, {"status": "No data"} + + # surround query with count query so we just get the count of the rows + final_query = f"SELECT count(*) FROM ({query[0]}) a;" + + get_page_of_repos_sql = s.sql.text(final_query) + + result = self.session.fetchall_data_from_sql_text(get_page_of_repos_sql) + + return result[0]["count"], {"status": "success"} + + + def generate_repo_query(self, source, count, **kwargs): + + if count: + # only query for repos ids so the query is faster for getting the count + select = " DISTINCT(augur_data.repo.repo_id)" else: - result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- _ \.]+)(.git)?\/?$", url) + select = """ DISTINCT(augur_data.repo.repo_id), + augur_data.repo.description, + augur_data.repo.repo_git AS url, + augur_data.repo.repo_status, + a.commits_all_time, + b.issues_all_time, + rg_name, + augur_data.repo.repo_group_id""" + + query = f""" + SELECT + {select} + FROM + augur_data.repo + LEFT OUTER JOIN augur_data.api_get_all_repos_commits a ON augur_data.repo.repo_id = a.repo_id + LEFT OUTER JOIN augur_data.api_get_all_repos_issues b ON augur_data.repo.repo_id = b.repo_id + JOIN augur_data.repo_groups ON augur_data.repo.repo_group_id = augur_data.repo_groups.repo_group_id\n""" + + if source == "user": + + user = kwargs.get("user") + if not user: + print("Func: generate_repo_query. Error: User not passed when trying to get user repos") + return None, {"status": "User not passed when trying to get user repos"} + + if not user.groups: + return None, {"status": "No data"} - if not result: - return None, None + query += "\t\t JOIN augur_operations.user_repos ON augur_data.repo.repo_id = augur_operations.user_repos.repo_id\n" + query += "\t\t JOIN augur_operations.user_groups ON augur_operations.user_repos.group_id = augur_operations.user_groups.group_id\n" + query += f"\t\t WHERE augur_operations.user_groups.user_id = {user.user_id}\n" - capturing_groups = result.groups() + elif source == "group": - try: - owner = capturing_groups[0] - repo = capturing_groups[1] + with GithubTaskSession(logger) as session: - return owner, repo - except IndexError: - return None, None + controller = RepoLoadController(session) - def parse_org_url(self, url): + user = kwargs.get("user") + if not user: + print("Func: generate_repo_query. Error: User not specified") + return None, {"status": "User not specified"} - result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/?$", url) + group_name = kwargs.get("group_name") + if not group_name: + print("Func: generate_repo_query. Error: Group name not specified") + return None, {"status": "Group name not specified"} - if not result: - return None + group_id = controller.convert_group_name_to_id(user.user_id, group_name) + if group_id is None: + print("Func: generate_repo_query. 
Error: Group does not exist") + return None, {"status": "Group does not exists"} - capturing_groups = result.groups() + query += "\t\t JOIN augur_operations.user_repos ON augur_data.repo.repo_id = augur_operations.user_repos.repo_id\n" + query += f"\t\t WHERE augur_operations.user_repos.group_id = {group_id}\n" - try: - owner = capturing_groups[0] - return owner - except IndexError: - return None + if not count: + order_by = kwargs.get("order_by") or "repo_id" + direction = kwargs.get("direction") or "ASC" + page = kwargs.get("page") or 0 + page_size = kwargs.get("page_size") or 25 - + query += f"\t ORDER BY {order_by} {direction}\n" + query += f"\t LIMIT {page_size}\n" + query += f"\t OFFSET {page*page_size};\n" + return query, {"status": "success"} diff --git a/setup.py b/setup.py index 80891ead96..75f424036d 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ "Beaker==1.11.0", # 1.11.0 "SQLAlchemy==1.3.23", # 1.4.40 "itsdangerous==2.0.1", # 2.1.2 - "Jinja2==3.0.2", # 3.1.2 + 'Jinja2~=3.0.3', "Flask==2.0.2", # 2.2.2 "Flask-Cors==3.0.10", "Flask-Login==0.5.0", @@ -77,6 +77,7 @@ "eventlet==0.33.1", "flower==1.2.0", "tornado==6.1", # added because it sometimes errors when tornado is not 6.1 even though nothing we install depends on it + 'Werkzeug~=2.0.0', "pylint==2.15.5" ], extras_require={ diff --git a/tests/test_applicaton/test_repo_load_controller/helper.py b/tests/test_applicaton/test_repo_load_controller/helper.py new file mode 100644 index 0000000000..b05be747a7 --- /dev/null +++ b/tests/test_applicaton/test_repo_load_controller/helper.py @@ -0,0 +1,163 @@ +import sqlalchemy as s +import logging + +from augur.util.repo_load_controller import ORG_REPOS_ENDPOINT + +from augur.application.db.session import DatabaseSession +from augur.application.db.models import Config +from augur.tasks.github.util.github_paginator import hit_api +from augur.application.db.util import execute_session_query + +logger = logging.getLogger(__name__) + + +######## Helper Functions to Get Delete statements ################# + +def get_delete_statement(schema, table): + + return """DELETE FROM "{}"."{}";""".format(schema, table) + +def get_repo_delete_statement(): + + return get_delete_statement("augur_data", "repo") + +def get_repo_group_delete_statement(): + + return get_delete_statement("augur_data", "repo_groups") + +def get_user_delete_statement(): + + return get_delete_statement("augur_operations", "users") + +def get_user_repo_delete_statement(): + + return get_delete_statement("augur_operations", "user_repos") + +def get_user_group_delete_statement(): + + return get_delete_statement("augur_operations", "user_groups") + +def get_config_delete_statement(): + + return get_delete_statement("augur_operations", "config") + +def get_repo_related_delete_statements(table_list): + """Takes a list of tables related to the RepoLoadController class and generates a delete statement. + + Args: + table_list: list of table names. 
Valid table names are + "user_repos" or "user_repo", "repo" or "repos", "repo_groups" or "repo_group:, "user" or "users", and "config" + + """ + + query_list = [] + if "user_repos" in table_list or "user_repo" in table_list: + query_list.append(get_user_repo_delete_statement()) + + if "user_groups" in table_list or "user_group" in table_list: + query_list.append(get_user_group_delete_statement()) + + if "repos" in table_list or "repo" in table_list: + query_list.append(get_repo_delete_statement()) + + if "repo_groups" in table_list or "repo_group" in table_list: + query_list.append(get_repo_group_delete_statement()) + + if "users" in table_list or "user" in table_list: + query_list.append(get_user_delete_statement()) + + if "config" in table_list: + query_list.append(get_config_delete_statement()) + + return " ".join(query_list) + +######## Helper Functions to add github api keys from prod db to test db ################# +def add_keys_to_test_db(test_db_engine): + + row = None + section_name = "Keys" + setting_name = "github_api_key" + with DatabaseSession(logger) as session: + query = session.query(Config).filter(Config.section_name==section_name, Config.setting_name==setting_name) + row = execute_session_query(query, 'one') + + with DatabaseSession(logger, test_db_engine) as test_session: + new_row = Config(section_name=section_name, setting_name=setting_name, value=row.value, type="str") + test_session.add(new_row) + test_session.commit() + + +######## Helper Functions to get insert statements ################# + +def get_repo_insert_statement(repo_id, rg_id, repo_url="place holder url", repo_status="New"): + + return """INSERT INTO "augur_data"."repo" ("repo_id", "repo_group_id", "repo_git", "repo_path", "repo_name", "repo_added", "repo_status", "repo_type", "url", "owner_id", "description", "primary_language", "created_at", "forked_from", "updated_at", "repo_archived_date_collected", "repo_archived", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, {}, '{}', NULL, NULL, '2022-08-15 21:08:07', '{}', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'CLI', '1.0', 'Git', '2022-08-15 21:08:07');""".format(repo_id, rg_id, repo_url, repo_status) + +def get_user_repo_insert_statement(repo_id, group_id): + + return """INSERT INTO "augur_operations"."user_repos" ("repo_id", "group_id") VALUES ({}, {});""".format(repo_id, group_id) + +def get_repo_group_insert_statement(rg_id): + + return """INSERT INTO "augur_data"."repo_groups" ("repo_group_id", "rg_name", "rg_description", "rg_website", "rg_recache", "rg_last_modified", "rg_type", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, 'Default Repo Group', 'The default repo group created by the schema generation script', '', 0, '2019-06-03 15:55:20', 'GitHub Organization', 'load', 'one', 'git', '2019-06-05 13:36:25');""".format(rg_id) + +def get_user_insert_statement(user_id, username="bil", email="default@gmail.com"): + + return """INSERT INTO "augur_operations"."users" ("user_id", "login_name", "login_hashword", "email", "first_name", "last_name", "admin") VALUES ({}, '{}', 'pass', '{}', 'bill', 'bob', false);""".format(user_id, username, email) + +def get_user_group_insert_statement(user_id, group_name, group_id=None): + + if group_id: + return """INSERT INTO "augur_operations"."user_groups" ("group_id", "user_id", "name") VALUES ({}, {}, '{}');""".format(group_id, user_id, group_name) + + return """INSERT INTO "augur_operations"."user_groups" (user_id", "name") VALUES 
(1, 'default');""".format(user_id, group_name) + + +######## Helper Functions to get retrieve data from tables ################# + +def get_repos(connection, where_string=None): + + query_list = [] + query_list.append('SELECT * FROM "augur_data"."repo"') + + if where_string: + if where_string.endswith(";"): + query_list.append(where_string[:-1]) + + query_list.append(where_string) + + query_list.append(";") + + query = s.text(" ".join(query_list)) + + return connection.execute(query).fetchall() + +def get_user_repos(connection): + + return connection.execute(s.text("""SELECT * FROM "augur_operations"."user_repos";""")).fetchall() + + +######## Helper Functions to get repos in an org ################# + +def get_org_repos(org_name, session): + + attempts = 0 + while attempts < 10: + result = hit_api(session.oauths, ORG_REPOS_ENDPOINT.format(org_name), logger) + + # if result is None try again + if not result: + attempts += 1 + continue + + response = result.json() + + if response: + return response + + return None + +def get_org_repo_count(org_name, session): + + repos = get_org_repos(org_name, session) + return len(repos) diff --git a/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py b/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py new file mode 100644 index 0000000000..8e9b104b38 --- /dev/null +++ b/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py @@ -0,0 +1,148 @@ +import pytest +import logging + +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.tasks.github.util.github_task_session import GithubTaskSession + +from augur.util.repo_load_controller import RepoLoadController, DEFAULT_REPO_GROUP_IDS, CLI_USER_ID + + +logger = logging.getLogger(__name__) + + +VALID_ORG = {"org": "CDCgov", "repo_count": 246} + + +def test_add_frontend_org_with_invalid_org(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + + data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "org_name": "chaosssss", "user_group_name": "test_group", "user_group_id": 1} + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + url = f"https://github.com/{data['org_name']}/" + result = controller.add_frontend_org(url, data["user_id"], data["user_group_name"]) + assert result["status"] == "Invalid org" + + # test with invalid group name + result = controller.add_frontend_org(url, data["user_id"], "Invalid group name") + assert result["status"] == "Invalid group name" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_frontend_org_with_valid_org(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", 
"config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "org_name": VALID_ORG["org"], "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + url = "https://github.com/{}/".format(data["org_name"]) + result = RepoLoadController(session).add_frontend_org(url, data["user_id"], data["user_group_name"]) + assert result["status"] == "Org repos added" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == VALID_ORG["repo_count"] + + user_repo_result = get_user_repos(connection) + assert user_repo_result is not None + assert len(user_repo_result) == VALID_ORG["repo_count"] + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_cli_org_with_valid_org(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": VALID_ORG["org"], "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + repo_count = None + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + result = controller.add_cli_org(data["org_name"]) + + assert result["status"] == "Org added" + + result2 = controller.add_cli_org("Invalid org") + assert result2["status"] == "No organization found" + + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == VALID_ORG["repo_count"] + + user_repo_result = get_user_repos(connection) + assert user_repo_result is not None + assert len(user_repo_result) == VALID_ORG["repo_count"] + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + diff --git a/tests/test_applicaton/test_repo_load_controller/test_adding_repos.py b/tests/test_applicaton/test_repo_load_controller/test_adding_repos.py new file mode 100644 index 0000000000..7f65b1e017 --- /dev/null +++ b/tests/test_applicaton/test_repo_load_controller/test_adding_repos.py @@ -0,0 +1,233 @@ +import pytest +import logging + +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.tasks.github.util.github_task_session import GithubTaskSession + +from 
augur.util.repo_load_controller import RepoLoadController, DEFAULT_REPO_GROUP_IDS, CLI_USER_ID + + + +logger = logging.getLogger(__name__) + + +def test_add_frontend_repos_with_invalid_repo(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + url = "https://github.com/chaoss/whitepaper" + + data = {"user_id": 2, "repo_group_id": 5, "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + result = RepoLoadController(session).add_frontend_repo(url, data["user_id"], data["user_group_name"]) + + assert result["status"] == "Invalid repo" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_cli_repos_with_invalid_repo_group_id(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": "operate-first", "repo_name": "operate-first-twitter", "user_group_name": "test_group", "user_group_id": 1} + url = f"https://github.com/{data['org_name']}/{data['repo_name']}" + + query_statements = [] + query_statements.append(clear_tables_statement) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + repo_data = {"url": url, "repo_group_id": 5} + + controller = RepoLoadController(session) + result = controller.add_cli_repo(repo_data) + assert result["status"] == f"Invalid repo group id specified for {repo_data['url']}, skipping." 
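The assertion above pins down the CLI path's new behaviour: `add_cli_repo` returns a status dict when the repo group id is invalid instead of silently returning. A hedged sketch of a script-side caller, assuming the behaviour shown in the earlier controller hunk where the success path falls through without an explicit return (so `None` is treated as success); `bulk_add_cli_repos` and `repo_entries` are hypothetical names, not part of the patch:

```
import logging
from augur.tasks.github.util.github_task_session import GithubTaskSession
from augur.util.repo_load_controller import RepoLoadController

logger = logging.getLogger(__name__)

def bulk_add_cli_repos(repo_entries):
    # repo_entries: hypothetical list of {"url": ..., "repo_group_id": ...} dicts,
    # mirroring the repo_data shape used in the test above
    with GithubTaskSession(logger) as session:
        controller = RepoLoadController(session)
        for entry in repo_entries:
            result = controller.add_cli_repo(entry)
            # an invalid repo_group_id yields {"status": "Invalid repo group id ... skipping."}
            if result is not None:
                logger.warning(result["status"])
```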
+ + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + + + +def test_add_cli_repos_with_duplicates(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": "operate-first", "repo_name": "operate-first-twitter", "user_group_name": "test_group", "user_group_id": 1} + url = f"https://github.com/{data['org_name']}/{data['repo_name']}" + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + repo_data = {"url": url, "repo_group_id": data["repo_group_id"]} + + controller = RepoLoadController(session) + controller.add_cli_repo(repo_data) + controller.add_cli_repo(repo_data) + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + + assert result is not None + assert len(result) == 1 + assert dict(result[0])["repo_git"] == url + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + + +def test_add_frontend_repos_with_duplicates(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + url = "https://github.com/operate-first/operate-first-twitter" + + data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + result = controller.add_frontend_repo(url, data["user_id"], data["user_group_name"]) + result2 = controller.add_frontend_repo(url, data["user_id"], data["user_group_name"]) + + # add repo with invalid group name + result3 = controller.add_frontend_repo(url, data["user_id"], "Invalid group name") + + assert result["status"] == "Repo Added" + assert result2["status"] == "Repo Added" + assert result3["status"] == "Invalid group name" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == 1 + assert dict(result[0])["repo_git"] == url + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + + +def test_remove_frontend_repo(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = 
get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + url = "https://github.com/operate-first/operate-first-twitter" + + data = {"user_id": 2, "repo_id": 5, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + query_statements.append(get_repo_insert_statement(data["repo_id"], data["repo_group_id"], repo_url="url")) + query_statements.append(get_user_repo_insert_statement(data["repo_id"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + # remove valid user repo + result = controller.remove_frontend_repo(data["repo_id"], data["user_id"], data["user_group_name"]) + assert result["status"] == "Repo Removed" + + with test_db_engine.connect() as connection: + + repos = get_user_repos(connection) + assert len(repos) == 0 + + # remove invalid group + result = controller.remove_frontend_repo(data["repo_id"], data["user_id"], "invalid group") + assert result["status"] == "Invalid group name" + + # pass invalid data types + result = controller.remove_frontend_repo("5", data["user_id"], data["user_group_name"]) + assert result["status"] == "Invalid input params" + + result = controller.remove_frontend_repo(data["repo_id"], "1", data["user_group_name"]) + assert result["status"] == "Invalid input params" + + result = controller.remove_frontend_repo(data["repo_id"], data["user_id"], 5) + assert result["status"] == "Invalid input params" + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) \ No newline at end of file diff --git a/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py b/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py new file mode 100644 index 0000000000..9034b42a84 --- /dev/null +++ b/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py @@ -0,0 +1,726 @@ +import logging +import pytest +import sqlalchemy as s + + +from augur.util.repo_load_controller import RepoLoadController, DEFAULT_REPO_GROUP_IDS, CLI_USER_ID + +from augur.application.db.session import DatabaseSession +from augur.tasks.github.util.github_task_session import GithubTaskSession +from tests.test_applicaton.test_repo_load_controller.helper import * + +logger = logging.getLogger(__name__) + + +def test_parse_repo_url(): + + with DatabaseSession(logger) as session: + + controller = RepoLoadController(session) + + assert controller.parse_repo_url("hello world") == (None, None) + assert controller.parse_repo_url("https://github.com/chaoss/hello") == ("chaoss", "hello") + assert controller.parse_repo_url("https://github.com/hello124/augur") == ("hello124", "augur") + assert controller.parse_repo_url("https://github.com//augur") == (None, None) + assert controller.parse_repo_url("https://github.com/chaoss/") == (None, None) + assert controller.parse_repo_url("https://github.com//") == (None, None) + assert controller.parse_repo_url("https://github.com/chaoss/augur") == ("chaoss", "augur") + assert 
controller.parse_repo_url("https://github.com/chaoss/augur/") == ("chaoss", "augur") + assert controller.parse_repo_url("https://github.com/chaoss/augur.git") == ("chaoss", "augur") + + +def test_parse_org_url(): + + with DatabaseSession(logger) as session: + + controller = RepoLoadController(session) + + assert controller.parse_org_url("hello world") == None, None + assert controller.parse_org_url("https://github.com/chaoss/") == "chaoss" + assert controller.parse_org_url("https://github.com/chaoss") == "chaoss" + assert controller.parse_org_url("https://github.com/hello124/augur") == None + assert controller.parse_org_url("https://github.com//augur") == None, None + assert controller.parse_org_url("https://github.com//") == None + assert controller.parse_org_url("https://github.com/chaoss/augur") == None + + +def test_is_valid_repo(): + + with GithubTaskSession(logger) as session: + + controller = RepoLoadController(session) + + assert controller.is_valid_repo("hello world") is False + assert controller.is_valid_repo("https://github.com/chaoss/hello") is False + assert controller.is_valid_repo("https://github.com/hello124/augur") is False + assert controller.is_valid_repo("https://github.com//augur") is False + assert controller.is_valid_repo("https://github.com/chaoss/") is False + assert controller.is_valid_repo("https://github.com//") is False + assert controller.is_valid_repo("https://github.com/chaoss/augur") is True + assert controller.is_valid_repo("https://github.com/chaoss/augur/") is True + assert controller.is_valid_repo("https://github.com/chaoss/augur.git") is True + assert controller.is_valid_repo("https://github.com/chaoss/augur/") is True + +def test_is_valid_repo_group_id(test_db_engine): + + clear_tables = ["repo_groups"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + + + data = {"rg_ids": [1, 2, 3], "repo_id": 1, "tool_source": "Frontend", + "repo_url": "https://github.com/chaoss/augur"} + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["rg_ids"][0])) + query_statements.append(get_repo_group_insert_statement(data["rg_ids"][1])) + query_statements.append(get_repo_group_insert_statement(data["rg_ids"][2])) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + # valid + assert controller.is_valid_repo_group_id(data["rg_ids"][0]) is True + assert controller.is_valid_repo_group_id(data["rg_ids"][1]) is True + assert controller.is_valid_repo_group_id(data["rg_ids"][2]) is True + + + # invalid + assert controller.is_valid_repo_group_id(-1) is False + assert controller.is_valid_repo_group_id(12) is False + assert controller.is_valid_repo_group_id(11111) is False + + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_repo_row(test_db_engine): + + clear_tables = ["repo", "repo_groups"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + data = {"rg_id": 1, + "tool_source": "Frontend", + "repo_urls": ["https://github.com/chaoss/augur", "https://github.com/chaoss/grimoirelab-sortinghat"] + } + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + 
query_statements.append(get_repo_group_insert_statement(data["rg_id"])) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + assert RepoLoadController(session).add_repo_row(data["repo_urls"][0], data["rg_id"], data["tool_source"]) is not None + assert RepoLoadController(session).add_repo_row(data["repo_urls"][1], data["rg_id"], data["tool_source"]) is not None + + # invalid rg_id + assert RepoLoadController(session).add_repo_row(data["repo_urls"][0], 12, data["tool_source"]) is None + + # invalid type for repo url + assert RepoLoadController(session).add_repo_row(1, data["rg_id"], data["tool_source"]) is None + + # invalid type for rg_id + assert RepoLoadController(session).add_repo_row(data["repo_urls"][1], "1", data["tool_source"]) is None + + # invalid type for tool_source + assert RepoLoadController(session).add_repo_row(data["repo_urls"][1], data["rg_id"], 52) is None + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == len(data["repo_urls"]) + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_repo_row_with_updates(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + data = {"old_rg_id": 1, "new_rg_id": 2, "repo_id": 1, "repo_id_2": 2, "tool_source": "Test", + "repo_url": "https://github.com/chaoss/augur", "repo_url_2": "https://github.com/chaoss/grimoirelab-perceval-opnfv", "repo_status": "Complete"} + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["old_rg_id"])) + query_statements.append(get_repo_group_insert_statement(data["new_rg_id"])) + query_statements.append(get_repo_insert_statement(data["repo_id"], data["old_rg_id"], repo_url=data["repo_url"], repo_status=data["repo_status"])) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + result = RepoLoadController(session).add_repo_row(data["repo_url"], data["new_rg_id"], data["tool_source"]) is not None + assert result == data["repo_id"] + + with test_db_engine.connect() as connection: + + result = get_repos(connection, where_string=f"WHERE repo_git='{data['repo_url']}'") + assert result is not None + assert len(result) == 1 + + value = dict(result[0]) + assert value["repo_status"] == data["repo_status"] + assert value["repo_group_id"] == data["new_rg_id"] + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_repo_to_user_group(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"repo_ids": [1, 2, 3], "repo_urls":["url 1", "url2", "url3"], "user_id": 2, "user_repo_group_id": 1, "user_group_ids": [1, 2], "user_group_names": ["test_group", "test_group_2"]} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["user_repo_group_id"])) + + for i in range(0, len(data["repo_ids"])): + 
query_statements.append(get_repo_insert_statement(data["repo_ids"][i], data["user_repo_group_id"], data["repo_urls"][i])) + + query_statements.append(get_user_insert_statement(data["user_id"])) + + for i in range(0, len(data["user_group_ids"])): + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_names"][i], data["user_group_ids"][i])) + + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + # add valid repo to group 0 + assert controller.add_repo_to_user_group(data["repo_ids"][0], data["user_group_ids"][0]) is True + + # add repo again to group 0 ... should be 1 repo row still + assert controller.add_repo_to_user_group(data["repo_ids"][0], data["user_group_ids"][0]) is True + + # add another valid repo to group 0 + assert controller.add_repo_to_user_group(data["repo_ids"][1], data["user_group_ids"][0]) is True + + # add same repo to group 1 + assert controller.add_repo_to_user_group(data["repo_ids"][0], data["user_group_ids"][1]) is True + + # add different repo to group 1 + assert controller.add_repo_to_user_group(data["repo_ids"][2], data["user_group_ids"][1]) is True + + # add with invalid repo id + assert controller.add_repo_to_user_group(130000, data["user_group_ids"][1]) is False + + # add with invalid group_id + assert controller.add_repo_to_user_group(data["repo_ids"][0], 133333) is False + + # pass invalid tpyes + assert controller.add_repo_to_user_group("130000", data["user_group_ids"][1]) is False + assert controller.add_repo_to_user_group(data["repo_ids"][0], "133333") is False + + + # end result + # 4 rows in table + # 2 rows in each group + + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_repos";""") + # WHERE "group_id"=:user_group_id AND "repo_id"=:repo_id + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 4 + + + query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "group_id"={};""".format(data["user_group_ids"][0])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 2 + + + query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "group_id"={};""".format(data["user_group_ids"][0])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 2 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_user_group(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = { + "users": [ + { + "id": 0, + "username": "user 1", + "email": "email 1" + }, + { + "id": 1, + "username": "user 2", + "email": "email 2" + } + ], + "group_names": ["test_group", "test_group_2"]} + + query_statements = [] + query_statements.append(clear_tables_statement) + + for user in data["users"]: + query_statements.append(get_user_insert_statement(user["id"], user["username"], user["email"])) + + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + # add valid group to user 0 + assert controller.add_user_group(data["users"][0]["id"], 
data["group_names"][0])["status"] == "Group created" + + # add group again to user 0 ... should be 1 group row still + assert controller.add_user_group(data["users"][0]["id"], data["group_names"][0])["status"] == "Group created" + + # add another valid group to user 0 + assert controller.add_user_group(data["users"][0]["id"], data["group_names"][1])["status"] == "Group created" + + # add same group to user 1 + assert controller.add_user_group(data["users"][1]["id"], data["group_names"][0])["status"] == "Group created" + + + # add with invalid user id + assert controller.add_user_group(130000, data["group_names"][0])["status"] == "Error: User id does not exist" + + # pass invalid tpyes + assert controller.add_user_group("130000", data["group_names"][0])["status"] == "Invalid input" + assert controller.add_user_group(data["users"][0]["id"], 133333)["status"] == "Invalid input" + + + # end result + # 3 groups in table + # 1 row for user 1 + # 2 rows for user 0 + + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 3 + + query = s.text("""SELECT * FROM "augur_operations"."user_groups" WHERE "user_id"={};""".format(data["users"][0]["id"])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 2 + + query = s.text("""SELECT * FROM "augur_operations"."user_groups" WHERE "user_id"={};""".format(data["users"][1]["id"])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 1 + + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + +def test_convert_group_name_to_id(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user_id =1 + + groups = [ + { + "group_name": "test group 1", + "group_id": 1 + }, + { + "group_name": "test group 2", + "group_id": 2 + }, + { + "group_name": "test group 3", + "group_id": 3 + }, + ] + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id)) + + for group in groups: + query_statements.append(get_user_group_insert_statement(user_id, group["group_name"], group["group_id"])) + + connection.execute("".join(query_statements)) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + for group in groups: + assert controller.convert_group_name_to_id(user_id, group["group_name"]) == group["group_id"] + + # test invalid group name + assert controller.convert_group_name_to_id(user_id, "hello") is None + + # test invalid user id + assert controller.convert_group_name_to_id(user_id*2, groups[0]["group_name"]) is None + + # test invalid types + assert controller.convert_group_name_to_id(user_id, 5) is None + assert controller.convert_group_name_to_id("5", groups[0]["group_name"]) is None + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_remove_user_group(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + 
user_id =1 + repo_id = 1 + rg_id = 1 + + groups = [ + { + "group_name": "test group 1", + "group_id": 1 + }, + { + "group_name": "test group 2", + "group_id": 2 + }, + { + "group_name": "test group 3", + "group_id": 3 + }, + { + "group_name": "test group 4", + "group_id": 4 + }, + { + "group_name": "test group 5", + "group_id": 5 + } + ] + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id)) + + for group in groups: + query_statements.append(get_user_group_insert_statement(user_id, group["group_name"], group["group_id"])) + + query_statements.append(get_repo_group_insert_statement(rg_id)) + query_statements.append(get_repo_insert_statement(repo_id, rg_id)) + query_statements.append(get_user_repo_insert_statement(repo_id, groups[0]["group_id"])) + + connection.execute("".join(query_statements)) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + assert controller.remove_user_group(user_id, "hello")["status"] == "WARNING: Trying to delete group that does not exist" + + i = 0 + while(i < len(groups)-2): + assert controller.remove_user_group(user_id, groups[i]["group_name"])["status"] == "Group deleted" + i += 1 + + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == len(groups)-i + + + while(i < len(groups)): + + assert controller.remove_user_group(user_id, groups[i]["group_name"])["status"] == "Group deleted" + i += 1 + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + + +def test_get_user_groups(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user_id_1 = 1 + user_id_2 = 2 + + + groups = [ + { + "group_name": "test group 1", + "group_id": 1 + }, + { + "group_name": "test group 2", + "group_id": 2 + }, + { + "group_name": "test group 3", + "group_id": 3 + }, + { + "group_name": "test group 4", + "group_id": 4 + }, + { + "group_name": "test group 5", + "group_id": 5 + } + ] + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id_1)) + + # add user with no user groups + query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) + + for group in groups: + query_statements.append(get_user_group_insert_statement(user_id_1, group["group_name"], group["group_id"])) + + connection.execute("".join(query_statements)) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + assert len(controller.get_user_groups(user_id_1)) == len(groups) + + assert len(controller.get_user_groups(user_id_2)) == 0 + + + with test_db_engine.connect() as connection: + + user_group_delete_statement = get_user_group_delete_statement() + query = s.text(user_group_delete_statement) + + result = connection.execute(query) + + finally: + with test_db_engine.connect() as connection: 
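The group tests above fix the status strings and return types for `add_user_group`, `convert_group_name_to_id`, and `remove_user_group`. A small sketch of how those calls compose, assuming the statuses asserted in the tests are the relevant success cases; `recreate_group` is a hypothetical helper, not part of the patch:

```
import logging
from augur.tasks.github.util.github_task_session import GithubTaskSession
from augur.util.repo_load_controller import RepoLoadController

logger = logging.getLogger(__name__)

def recreate_group(user_id, group_name):
    # hypothetical helper chaining the calls the surrounding tests exercise
    with GithubTaskSession(logger) as session:
        controller = RepoLoadController(session)
        if controller.convert_group_name_to_id(user_id, group_name) is not None:
            # remove_user_group reports "Group deleted" on success and a
            # warning status when the group does not exist
            controller.remove_user_group(user_id, group_name)
        result = controller.add_user_group(user_id, group_name)
        return result["status"] == "Group created"
```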
+ connection.execute(clear_tables_statement) + + +def test_get_user_group_repos(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user_id =1 + user_id_2 = 2 + group_id = 1 + group_id_2 = 2 + rg_id = 1 + group_name = "test_group 1" + repo_ids = [1, 2, 3, 4, 5] + repo_urls = ["url1", "url2", "url3", "url4", "url5"] + + query_statements = [] + query_statements.append(clear_tables_statement) + + # add user with a group that has multiple repos + query_statements.append(get_user_insert_statement(user_id)) + query_statements.append(get_user_group_insert_statement(user_id, group_name, group_id)) + + # add user with a group that has no repos + query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) + query_statements.append(get_user_group_insert_statement(user_id_2, group_name, group_id_2)) + + query_statements.append(get_repo_group_insert_statement(rg_id)) + + for i in range(0, len(repo_ids)): + query_statements.append(get_repo_insert_statement(repo_ids[i], rg_id, repo_urls[i])) + query_statements.append(get_user_repo_insert_statement(repo_ids[i], group_id)) + + connection.execute("".join(query_statements)) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + result = controller.get_user_group_repos(group_id) + + assert len(result) == len(repo_ids) + assert set([repo.repo_id for repo in result]) == set(repo_ids) + + result = controller.get_user_group_repos(group_id_2) + + assert len(result) == 0 + + + with test_db_engine.connect() as connection: + + user_repo_delete_statement = get_user_repo_delete_statement() + query = s.text(user_repo_delete_statement) + + result = connection.execute(query) + + assert len(controller.get_user_group_repos(group_id)) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_get_user_group_repos(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user_id =1 + user_id_2 = 2 + group_id = 1 + group_id_2 = 2 + rg_id = 1 + group_name = "test_group 1" + repo_ids = [1, 2, 3, 4, 5] + repo_urls = ["url1", "url2", "url3", "url4", "url5"] + + query_statements = [] + query_statements.append(clear_tables_statement) + + # add user with a group that has multiple repos + query_statements.append(get_user_insert_statement(user_id)) + query_statements.append(get_user_group_insert_statement(user_id, group_name, group_id)) + + # add user with a group that has no repos + query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) + + query_statements.append(get_repo_group_insert_statement(rg_id)) + + for i in range(0, len(repo_ids)): + query_statements.append(get_repo_insert_statement(repo_ids[i], rg_id, repo_urls[i])) + query_statements.append(get_user_repo_insert_statement(repo_ids[i], group_id)) + + connection.execute("".join(query_statements)) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + # test user with a group that has multiple repos + result = controller.get_user_repo_ids(user_id) + + assert len(result) == len(repo_ids) + assert set(result) 
== set(repo_ids) + + + # test user without any groups or repos + result = controller.get_user_repo_ids(user_id_2) + + assert len(result) == 0 + + query_statements.append(get_user_group_insert_statement(user_id_2, group_name, group_id_2)) + + + # test user with a group that doesn't have any repos + result = controller.get_user_repo_ids(user_id_2) + + assert len(result) == 0 + + with test_db_engine.connect() as connection: + + user_repo_delete_statement = get_user_repo_delete_statement() + query = s.text(user_repo_delete_statement) + + result = connection.execute(query) + + assert len(controller.get_user_group_repos(group_id)) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + + diff --git a/tests/test_applicaton/test_repo_load_controller/util.py b/tests/test_applicaton/test_repo_load_controller/util.py new file mode 100644 index 0000000000..b77cdb8bfe --- /dev/null +++ b/tests/test_applicaton/test_repo_load_controller/util.py @@ -0,0 +1,146 @@ +######## Helper Functions to Get Delete statements ################# + +def get_delete_statement(schema, table): + + return """DELETE FROM "{}"."{}";""".format(schema, table) + +def get_repo_delete_statement(): + + return get_delete_statement("augur_data", "repo") + +def get_repo_group_delete_statement(): + + return get_delete_statement("augur_data", "repo_groups") + +def get_user_delete_statement(): + + return get_delete_statement("augur_operations", "users") + +def get_user_repo_delete_statement(): + + return get_delete_statement("augur_operations", "user_repos") + +def get_user_group_delete_statement(): + + return get_delete_statement("augur_operations", "user_groups") + +def get_config_delete_statement(): + + return get_delete_statement("augur_operations", "config") + +def get_repo_related_delete_statements(table_list): + """Takes a list of tables related to the RepoLoadController class and generates a delete statement. + + Args: + table_list: list of table names. 
Valid table names are + "user_repos" or "user_repo", "repo" or "repos", "repo_groups" or "repo_group:, "user" or "users", and "config" + + """ + + query_list = [] + if "user_repos" in table_list or "user_repo" in table_list: + query_list.append(get_user_repo_delete_statement()) + + if "user_groups" in table_list or "user_group" in table_list: + query_list.append(get_user_group_delete_statement()) + + if "repos" in table_list or "repo" in table_list: + query_list.append(get_repo_delete_statement()) + + if "repo_groups" in table_list or "repo_group" in table_list: + query_list.append(get_repo_group_delete_statement()) + + if "users" in table_list or "user" in table_list: + query_list.append(get_user_delete_statement()) + + if "config" in table_list: + query_list.append(get_config_delete_statement()) + + return " ".join(query_list) + +######## Helper Functions to add github api keys from prod db to test db ################# +def add_keys_to_test_db(test_db_engine): + + row = None + section_name = "Keys" + setting_name = "github_api_key" + with DatabaseSession(logger) as session: + query = session.query(Config).filter(Config.section_name==section_name, Config.setting_name==setting_name) + row = execute_session_query(query, 'one') + + with DatabaseSession(logger, test_db_engine) as test_session: + new_row = Config(section_name=section_name, setting_name=setting_name, value=row.value, type="str") + test_session.add(new_row) + test_session.commit() + + +######## Helper Functions to get insert statements ################# + +def get_repo_insert_statement(repo_id, rg_id, repo_url="place holder url", repo_status="New"): + + return """INSERT INTO "augur_data"."repo" ("repo_id", "repo_group_id", "repo_git", "repo_path", "repo_name", "repo_added", "repo_status", "repo_type", "url", "owner_id", "description", "primary_language", "created_at", "forked_from", "updated_at", "repo_archived_date_collected", "repo_archived", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, {}, '{}', NULL, NULL, '2022-08-15 21:08:07', '{}', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'CLI', '1.0', 'Git', '2022-08-15 21:08:07');""".format(repo_id, rg_id, repo_url, repo_status) + +def get_repo_group_insert_statement(rg_id): + + return """INSERT INTO "augur_data"."repo_groups" ("repo_group_id", "rg_name", "rg_description", "rg_website", "rg_recache", "rg_last_modified", "rg_type", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, 'Default Repo Group', 'The default repo group created by the schema generation script', '', 0, '2019-06-03 15:55:20', 'GitHub Organization', 'load', 'one', 'git', '2019-06-05 13:36:25');""".format(rg_id) + +def get_user_insert_statement(user_id): + + return """INSERT INTO "augur_operations"."users" ("user_id", "login_name", "login_hashword", "email", "first_name", "last_name", "admin") VALUES ({}, 'bil', 'pass', 'b@gmil.com', 'bill', 'bob', false);""".format(user_id) + +def get_user_group_insert_statement(user_id, group_name, group_id=None): + + if group_id: + return """INSERT INTO "augur_operations"."user_groups" ("group_id", "user_id", "name") VALUES ({}, {}, '{}');""".format(group_id, user_id, group_name) + + return """INSERT INTO "augur_operations"."user_groups" (user_id", "name") VALUES (1, 'default');""".format(user_id, group_name) + + +######## Helper Functions to get retrieve data from tables ################# + +def get_repos(connection, where_string=None): + + query_list = [] + query_list.append('SELECT * FROM 
"augur_data"."repo"') + + if where_string: + if where_string.endswith(";"): + query_list.append(where_string[:-1]) + + query_list.append(where_string) + + query_list.append(";") + + query = s.text(" ".join(query_list)) + + return connection.execute(query).fetchall() + +def get_user_repos(connection): + + return connection.execute(s.text("""SELECT * FROM "augur_operations"."user_repos";""")).fetchall() + + +######## Helper Functions to get repos in an org ################# + +def get_org_repos(org_name, session): + + attempts = 0 + while attempts < 10: + result = hit_api(session.oauths, ORG_REPOS_ENDPOINT.format(org_name), logger) + + # if result is None try again + if not result: + attempts += 1 + continue + + response = result.json() + + if response: + return response + + return None + +def get_org_repo_count(org_name, session): + + repos = get_org_repos(org_name, session) + return len(repos) From dade8fe19a104d1cbdf3bbae372e60354986d8b1 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Wed, 18 Jan 2023 19:06:19 -0600 Subject: [PATCH 013/134] UPdating Dev with Main Changes (#2131) * Add bash in the container (#2112) The entrypoint.sh script use bash, running with podman return Jan 09 10:12:24 augur.osci.io podman[2004482]: /usr/bin/env: 'bash': No such file or directory Signed-off-by: Michael Scherer Signed-off-by: Michael Scherer * updated CONTRIBUTING.md (#2111) Signed-off-by: WhiteWolf47 Signed-off-by: WhiteWolf47 * my first commit Signed-off-by: Glowreeyah * Hardcode bash location Signed-off-by: Michael Scherer * Update README.md * Update metadata.py * Update README.md * Update metadata.py * Update README.md * Update metadata.py * Update README.md * Update metadata.py Signed-off-by: Michael Scherer Signed-off-by: WhiteWolf47 Signed-off-by: Glowreeyah Co-authored-by: mscherer Co-authored-by: ANURAG BHANDARI Co-authored-by: Glowreeyah Co-authored-by: Michael Scherer --- README.md | 4 ++-- metadata.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index cb28127a7b..64830bc4f8 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Augur NEW Release v0.43.8 +# Augur NEW Release v0.43.10 [![first-timers-only](https://img.shields.io/badge/first--timers--only-friendly-blue.svg?style=flat-square)](https://www.firsttimersonly.com/) We follow the [First Timers Only](https://www.firsttimersonly.com/) philosophy of tagging issues for first timers only, and walking one newcomer through the resolution process weekly. [You can find these issues tagged with "first timers only" on our issues list.](https://github.com/chaoss/augur/labels/first-timers-only). [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) [![Build Docker images](https://github.com/chaoss/augur/actions/workflows/build_docker.yml/badge.svg)](https://github.com/chaoss/augur/actions/workflows/build_docker.yml) [![Hits-of-Code](https://hitsofcode.com/github/chaoss/augur?branch=main)](https://hitsofcode.com/github/chaoss/augur/view?branch=main) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/2788/badge)](https://bestpractices.coreinfrastructure.org/projects/2788) @@ -6,7 +6,7 @@ ## NEW RELEASE ALERT! [If you want to jump right in, updated docker build/compose and bare metal installation instructions are available here](docs/new-install.md) -Augur is now releasing a dramatically improved new version to the main branch. 
It is also available here: https://github.com/chaoss/augur/releases/tag/v0.43.8 +Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.43.10 - The `main` branch is a stable version of our new architecture, which features: - Dramatic improvement in the speed of large scale data collection (10,000+ repos). All data is obtained for 10k+ repos within a week - A new job management architecture that uses Celery and Redis to manage queues, and enables users to run a Flower job monitoring dashboard diff --git a/metadata.py b/metadata.py index 7cdba7a8a1..60ff7b1a50 100644 --- a/metadata.py +++ b/metadata.py @@ -5,8 +5,8 @@ __short_description__ = "Python 3 package for free/libre and open-source software community metrics, models & data collection" -__version__ = "0.43.8" -__release__ = "v0.43.8 (Love Shack 23)" +__version__ = "0.43.10" +__release__ = "v0.43.10 (Louise)" __license__ = "MIT" __copyright__ = "University of Missouri, University of Nebraska-Omaha, CHAOSS, Brian Warner & Augurlabs 2023" From 6be629291eb33e8a0537c89b17eb1c795c072d5f Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sat, 21 Jan 2023 07:59:22 -0600 Subject: [PATCH 014/134] Updates to RabbitMQ Model, additional tasks (#2130) * scaling fix for repo_move Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Change to rabbitmq broker Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * don't ignore result Signed-off-by: Isaac Milarsky * More logging in detect_github_repo_move Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * print Signed-off-by: Isaac Milarsky * re-add facade contributors to task queue Signed-off-by: Isaac Milarsky * better handling and logging files model Signed-off-by: Isaac Milarsky * take advantage of rabbitmq allowing us to use celery result Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * get rid of redundant definition Signed-off-by: Isaac Milarsky * docs update Signed-off-by: Isaac Milarsky * Change celery task scheduling to not scale proportionally to the amount of repos Signed-off-by: Isaac Milarsky * analysis sequence pooling for facade scaling Signed-off-by: Isaac Milarsky * need to fix issues with accessing redis Signed-off-by: Isaac Milarsky * don't create so many sessions Signed-off-by: Isaac Milarsky * Update Signed-off-by: Isaac Milarsky * doc update * fix facade date query error Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * updating MQ and REDIS Docs * Updates to docs. * documentation updatese * test * documentation updates * doc hell * trying * analyze_commits_in_parallel now shows progress in quarters in the logs. 
Also applied same scaling changes to facade contributor resolution in insert_facade_contributors Signed-off-by: Isaac Milarsky * sql format Signed-off-by: Isaac Milarsky * Typo Signed-off-by: Isaac Milarsky * skeleton for deps worker Signed-off-by: Isaac Milarsky * Better error handling Signed-off-by: Isaac Milarsky * add dependency util files from main-old Signed-off-by: Isaac Milarsky * Dependency worker Signed-off-by: Isaac Milarsky * add dependency model to repo_collect Signed-off-by: Isaac Milarsky * Syntax Signed-off-by: Isaac Milarsky * Facade tasks not getting ran for some reason Signed-off-by: Isaac Milarsky * add file Signed-off-by: Isaac Milarsky * python import Signed-off-by: Isaac Milarsky * make sure rabbitmq messages are cleared Signed-off-by: Isaac Milarsky * schedule less at once Signed-off-by: Isaac Milarsky * Grab correct vhost from config Signed-off-by: Isaac Milarsky * optimistic Signed-off-by: Isaac Milarsky * Change repo_collect to split up task load for smaller message Signed-off-by: Isaac Milarsky * version Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * Low load patch Signed-off-by: Isaac Milarsky * Shrink facade messages Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * increase message load Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky Co-authored-by: Isaac Milarsky Co-authored-by: Isaac Milarsky --- augur/application/cli/backend.py | 31 +- augur/application/db/util.py | 12 + augur/tasks/git/dependency_tasks/__init__.py | 0 augur/tasks/git/dependency_tasks/core.py | 64 + .../dependency_util/__init__.py | 0 .../dependency_util/c_deps.py | 18 + .../dependency_util/cpp_deps.py | 18 + .../dependency_util/csharp_deps.py | 16 + .../dependency_util/dependency_calculator.py | 45 + .../dependency_util/java_deps.py | 16 + .../dependency_util/javascript_deps.py | 16 + .../dependency_util/php_deps.py | 18 + .../dependency_util/python_deps.py | 16 + .../dependency_util/ruby_deps.py | 18 + .../dependency_util/vb_deps.py | 16 + augur/tasks/git/dependency_tasks/tasks.py | 24 + augur/tasks/git/facade_tasks.py | 266 +-- .../facade04postanalysiscleanup.py | 35 +- .../facade_worker/facade05repofetch.py | 42 +- augur/tasks/github/detect_move/tasks.py | 11 +- augur/tasks/github/events/tasks.py | 46 +- augur/tasks/github/issues/tasks.py | 39 +- augur/tasks/github/messages/tasks.py | 31 +- augur/tasks/github/pull_requests/tasks.py | 30 +- augur/tasks/github/releases/core.py | 2 +- augur/tasks/init/celery_app.py | 3 +- augur/tasks/start_tasks.py | 67 +- result.txt | 1598 +++++++++++++++++ setup.py | 2 + 29 files changed, 2280 insertions(+), 220 deletions(-) create mode 100644 augur/tasks/git/dependency_tasks/__init__.py create mode 100644 augur/tasks/git/dependency_tasks/core.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/__init__.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/c_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/cpp_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/csharp_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/dependency_calculator.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/java_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/javascript_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/php_deps.py create mode 100644 
augur/tasks/git/dependency_tasks/dependency_util/python_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/ruby_deps.py create mode 100644 augur/tasks/git/dependency_tasks/dependency_util/vb_deps.py create mode 100644 augur/tasks/git/dependency_tasks/tasks.py create mode 100644 result.txt diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 37332435e2..3745dd6cf8 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -117,6 +117,11 @@ def start(disable_collection, development, port): try: clear_redis_caches() + connection_string = "" + with DatabaseSession(logger) as session: + connection_string = session.config.get_section("RabbitMQ")['connection_string'] + + clear_rabbitmq_messages(connection_string) except RedisConnectionError: pass @@ -127,19 +132,33 @@ def stop(): """ Sends SIGTERM to all Augur server & worker processes """ - _broadcast_signal_to_processes(given_logger=logging.getLogger("augur.cli")) + logger = logging.getLogger("augur.cli") + _broadcast_signal_to_processes(given_logger=logger) clear_redis_caches() + connection_string = "" + with DatabaseSession(logger) as session: + connection_string = session.config.get_section("RabbitMQ")['connection_string'] + + clear_rabbitmq_messages(connection_string) @cli.command('kill') def kill(): """ Sends SIGKILL to all Augur server & worker processes """ - _broadcast_signal_to_processes(broadcast_signal=signal.SIGKILL, given_logger=logging.getLogger("augur.cli")) + logger = logging.getLogger("augur.cli") + _broadcast_signal_to_processes(broadcast_signal=signal.SIGKILL, given_logger=logger) clear_redis_caches() + connection_string = "" + with DatabaseSession(logger) as session: + connection_string = session.config.get_section("RabbitMQ")['connection_string'] + + clear_rabbitmq_messages(connection_string) + + def clear_redis_caches(): """Clears the redis databases that celery and redis use.""" @@ -148,6 +167,14 @@ def clear_redis_caches(): subprocess.call(celery_purge_command.split(" ")) redis_connection.flushdb() +def clear_rabbitmq_messages(connection_string): + virtual_host_string = connection_string.split("/")[-1] + + logger.info("Clearing all messages from celery queue in rabbitmq") + rabbitmq_purge_command = f"sudo rabbitmqctl purge_queue celery -p {virtual_host_string}" + subprocess.call(rabbitmq_purge_command.split(" ")) + + @cli.command('export-env') def export_env(config): """ diff --git a/augur/application/db/util.py b/augur/application/db/util.py index c2b2500721..544a355ae2 100644 --- a/augur/application/db/util.py +++ b/augur/application/db/util.py @@ -38,5 +38,17 @@ def execute_session_query(query, query_type="all"): +def convert_orm_list_to_dict_list(result): + new_list = [] + for row in result: + row_dict = row.__dict__ + try: + del row_dict['_sa_instance_state'] + except: + pass + + new_list.append(row_dict) + + return new_list diff --git a/augur/tasks/git/dependency_tasks/__init__.py b/augur/tasks/git/dependency_tasks/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/augur/tasks/git/dependency_tasks/core.py b/augur/tasks/git/dependency_tasks/core.py new file mode 100644 index 0000000000..41e11b2409 --- /dev/null +++ b/augur/tasks/git/dependency_tasks/core.py @@ -0,0 +1,64 @@ +from datetime import datetime +import logging +import requests +import json +from augur.application.db.data_parse import * +from augur.application.db.models import * +from augur.application.db.session import DatabaseSession +from 
augur.tasks.init.celery_app import engine +from augur.application.db.util import execute_session_query +from augur.tasks.git.dependency_tasks.dependency_util import dependency_calculator as dep_calc + +def generate_deps_data(session, repo_id, path): + """Runs scc on repo and stores data in database + :param repo_id: Repository ID + :param path: Absolute path of the Repostiory + """ + session.logger.info('Searching for deps in repo') + session.logger.info(f'Repo ID: {repo_id}, Path: {path}') + + deps = dep_calc.get_deps(path) + try: + for dep in deps: + repo_deps = { + 'repo_id': repo_id, + 'dep_name' : dep.name, + 'dep_count' : dep.count, + 'dep_language' : dep.language, + 'tool_source': 'deps_model', + 'tool_version': '0.43.9', + 'data_source': 'Git', + 'data_collection_date': datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') + } + + insert_statement = s.sql.text(""" + INSERT INTO "repo_dependencies" ("repo_id", "dep_name", "dep_count", "dep_language", "tool_source", "tool_version", "data_source", "data_collection_date") + VALUES (:repo_id, :dep_name, :dep_count, :dep_language, :tool_source, :tool_version, :data_source, :data_collection_date) + """).bindparams(**repo_deps) + + #result = self.db.execute(self.repo_dependencies_table.insert().values(repo_deps)) + session.execute_sql(insert_statement) + except Exception as e: + session.logger.error(f"Could not complete generate_deps_data!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + + +def deps_model(session, repo_id): + """ Data collection and storage method + """ + session.logger.info(f"This is the deps model repo: {repo_id}.") + + repo_path_sql = s.sql.text(""" + SELECT repo_id, CONCAT(repo_group_id || chr(47) || repo_path || repo_name) AS path + FROM repo + WHERE repo_id = :repo_id + """).bindparams(repo_id=repo_id) + + result = session.execute_sql(repo_path_sql) + + relative_repo_path = result.fetchone()[1] + absolute_repo_path = session.config.get_section("Facade")['repo_directory'] + relative_repo_path + + try: + generate_deps_data(session,repo_id, absolute_repo_path) + except Exception as e: + session.logger.error(f"Could not complete deps_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") \ No newline at end of file diff --git a/augur/tasks/git/dependency_tasks/dependency_util/__init__.py b/augur/tasks/git/dependency_tasks/dependency_util/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/augur/tasks/git/dependency_tasks/dependency_util/c_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/c_deps.py new file mode 100644 index 0000000000..51a375058b --- /dev/null +++ b/augur/tasks/git/dependency_tasks/dependency_util/c_deps.py @@ -0,0 +1,18 @@ +import sys +import re +from pathlib import Path + +def get_files(path): + #copied from example on https://docs.python.org/3/library/pathlib.html + dir = path + p = Path(dir) + files = list(p.glob('**/*.c')) + return files + +def get_deps_for_file(path): + f = open(path, 'r') + matches = re.findall("#include\s*<(\w*)>", f.read()) + f.seek(0) + matches.extend(re.findall('#include\s*"(\w*)"', f.read())) + f.close() + return matches diff --git a/augur/tasks/git/dependency_tasks/dependency_util/cpp_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/cpp_deps.py new file mode 100644 index 0000000000..28a54fdf74 --- /dev/null +++ b/augur/tasks/git/dependency_tasks/dependency_util/cpp_deps.py @@ -0,0 +1,18 @@ +import sys +import re +from pathlib import Path + +def 
get_files(path): + #copied from example on https://docs.python.org/3/library/pathlib.html + dir = path + p = Path(dir) + files = list(p.glob('**/*.cpp')) + return files + +def get_deps_for_file(path): + f = open(path, 'r') + matches = re.findall("#include\s*<(\w*)>", f.read()) + f.seek(0) + matches.extend(re.findall('#include\s*"(\w*)"', f.read())) + f.close() + return matches diff --git a/augur/tasks/git/dependency_tasks/dependency_util/csharp_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/csharp_deps.py new file mode 100644 index 0000000000..342903e9d6 --- /dev/null +++ b/augur/tasks/git/dependency_tasks/dependency_util/csharp_deps.py @@ -0,0 +1,16 @@ +import sys +import re +from pathlib import Path + +def get_files(path): + #copied from example on https://docs.python.org/3/library/pathlib.html + dir = path + p = Path(dir) + files = list(p.glob('**/*.cs')) + return files + +def get_deps_for_file(path): + f = open(path, 'r') + matches = re.findall("using\s*(\w*)\s*;", f.read()) + f.close() + return matches diff --git a/augur/tasks/git/dependency_tasks/dependency_util/dependency_calculator.py b/augur/tasks/git/dependency_tasks/dependency_util/dependency_calculator.py new file mode 100644 index 0000000000..ae6a680652 --- /dev/null +++ b/augur/tasks/git/dependency_tasks/dependency_util/dependency_calculator.py @@ -0,0 +1,45 @@ +from augur.tasks.git.dependency_tasks.dependency_util import python_deps +from augur.tasks.git.dependency_tasks.dependency_util import ruby_deps +from augur.tasks.git.dependency_tasks.dependency_util import php_deps +from augur.tasks.git.dependency_tasks.dependency_util import javascript_deps +from augur.tasks.git.dependency_tasks.dependency_util import vb_deps +from augur.tasks.git.dependency_tasks.dependency_util import csharp_deps +from augur.tasks.git.dependency_tasks.dependency_util import java_deps +from augur.tasks.git.dependency_tasks.dependency_util import cpp_deps +from augur.tasks.git.dependency_tasks.dependency_util import c_deps +from augur.tasks.git.dependency_tasks.dependency_util import dependency_calculator + +class Dep: + def __init__(self, name, language, count): + self.name = name + self.language = language + self.count = count + def __repr__(self): + return f'Dep(name={self.name}, language={self.language}, count={self.count})' + +def get_deps(path): + deps = [] + deps.extend(get_language_deps(path, python_deps, 'python')) + deps.extend(get_language_deps(path, ruby_deps, 'ruby')) + deps.extend(get_language_deps(path, php_deps, 'php')) + deps.extend(get_language_deps(path, javascript_deps, 'javascript')) + deps.extend(get_language_deps(path, vb_deps, 'visual basic')) + deps.extend(get_language_deps(path, csharp_deps, 'C#')) + deps.extend(get_language_deps(path, java_deps, 'java')) + deps.extend(get_language_deps(path, cpp_deps, 'C++')) + deps.extend(get_language_deps(path, c_deps, 'C')) + return deps + +def get_language_deps(path, language, name): + files = language.get_files(path) + deps_map = {} + for f in files: + f_deps = language.get_deps_for_file(f) + if f_deps is None: + continue + for dep in f_deps: + if dep in deps_map: + deps_map[dep].count += 1 + else: + deps_map[dep] = Dep(dep, name, 1) + return list(deps_map.values()) diff --git a/augur/tasks/git/dependency_tasks/dependency_util/java_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/java_deps.py new file mode 100644 index 0000000000..b55c357e3a --- /dev/null +++ b/augur/tasks/git/dependency_tasks/dependency_util/java_deps.py @@ -0,0 +1,16 @@ +import sys +import re 
+from pathlib import Path
+
+def get_files(path):
+    #copied from example on https://docs.python.org/3/library/pathlib.html
+    dir = path
+    p = Path(dir)
+    files = list(p.glob('**/*.java'))
+    return files
+
+def get_deps_for_file(path):
+    f = open(path, 'r')
+    matches = re.findall("import\s*(\w*)\s*;", f.read())
+    f.close()
+    return matches
diff --git a/augur/tasks/git/dependency_tasks/dependency_util/javascript_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/javascript_deps.py
new file mode 100644
index 0000000000..fda86bd4ec
--- /dev/null
+++ b/augur/tasks/git/dependency_tasks/dependency_util/javascript_deps.py
@@ -0,0 +1,16 @@
+import sys
+import re
+from pathlib import Path
+
+def get_files(path):
+    #copied from example on https://docs.python.org/3/library/pathlib.html
+    dir = path
+    p = Path(dir)
+    files = list(p.glob('**/*.js'))
+    return files
+
+def get_deps_for_file(path):
+    f = open(path, 'r')
+    matches = re.findall("import\s*(\w*)", f.read())
+    f.close()
+    return matches
diff --git a/augur/tasks/git/dependency_tasks/dependency_util/php_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/php_deps.py
new file mode 100644
index 0000000000..690c06fbcf
--- /dev/null
+++ b/augur/tasks/git/dependency_tasks/dependency_util/php_deps.py
@@ -0,0 +1,18 @@
+import sys
+import re
+from pathlib import Path
+
+def get_files(path):
+    #copied from example on https://docs.python.org/3/library/pathlib.html
+    dir = path
+    p = Path(dir)
+    files = list(p.glob('**/*.php'))
+    return files
+
+def get_deps_for_file(path):
+    f = open(path, 'r')
+    matches = re.findall("include\s*'(.*)';", f.read())
+    f.seek(0)
+    matches.extend(re.findall('include\s*"(.*)";', f.read()))
+    f.close()
+    return matches
diff --git a/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py
new file mode 100644
index 0000000000..58f83ba7ac
--- /dev/null
+++ b/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py
@@ -0,0 +1,16 @@
+import sys
+import re
+from pathlib import Path
+
+def get_files(path):
+    #copied from example on https://docs.python.org/3/library/pathlib.html
+    dir = path
+    p = Path(dir)
+    files = list(p.glob('**/*.py'))
+    return files
+
+def get_deps_for_file(path):
+    f = open(path, 'r')
+    matches = re.findall("import\s*(\w*)", f.read())
+    f.close()
+    return matches
diff --git a/augur/tasks/git/dependency_tasks/dependency_util/ruby_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/ruby_deps.py
new file mode 100644
index 0000000000..802a8951f1
--- /dev/null
+++ b/augur/tasks/git/dependency_tasks/dependency_util/ruby_deps.py
@@ -0,0 +1,18 @@
+import sys
+import re
+from pathlib import Path
+
+def get_files(path):
+    #copied from example on https://docs.python.org/3/library/pathlib.html
+    dir = path
+    p = Path(dir)
+    files = list(p.glob('**/*.rb'))
+    return files
+
+def get_deps_for_file(path):
+    f = open(path, 'r')
+    matches = re.findall('require\s*"(.*)"', f.read())
+    f.seek(0)
+    matches.extend(re.findall('require_relative\s*"(.*)"', f.read()))
+    f.close()
+    return matches
diff --git a/augur/tasks/git/dependency_tasks/dependency_util/vb_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/vb_deps.py
new file mode 100644
index 0000000000..e3ed8e7845
--- /dev/null
+++ b/augur/tasks/git/dependency_tasks/dependency_util/vb_deps.py
@@ -0,0 +1,16 @@
+import sys
+import re
+from pathlib import Path
+
+def get_files(path):
+    #copied from example on https://docs.python.org/3/library/pathlib.html
+    dir = path
+    p = Path(dir)
+ files = list(p.glob('**/*.vb')) + return files + +def get_deps_for_file(path): + f = open(path, 'r') + matches = re.findall("Imports\s*(.*)", f.read()) + f.close() + return matches diff --git a/augur/tasks/git/dependency_tasks/tasks.py b/augur/tasks/git/dependency_tasks/tasks.py new file mode 100644 index 0000000000..9dcb8b3463 --- /dev/null +++ b/augur/tasks/git/dependency_tasks/tasks.py @@ -0,0 +1,24 @@ +import logging +import traceback +from augur.application.db.session import DatabaseSession +from augur.tasks.git.dependency_tasks.core import * +from augur.tasks.init.celery_app import celery_app as celery +from augur.application.db.util import execute_session_query + + +@celery.task +def process_dependency_metrics(repo_git_identifiers): + #raise NotImplementedError + + logger = logging.getLogger(process_dependency_metrics.__name__) + + session = DatabaseSession(logger) + + for repo_git in repo_git_identifiers: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query,'one') + + try: + deps_model(session, repo.repo_id) + except Exception as e: + session.logger.error(f"Could not complete deps_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") \ No newline at end of file diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index bcb329554d..d02a3cf19f 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -129,10 +129,12 @@ def update_analysis_log(repos_id,status): logger.info(f"Got past repo {repo_id}") @celery.task -def trim_commits_post_analysis_facade_task(commits): +def trim_commits_post_analysis_facade_task(repo_ids): logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) + session = FacadeSession(logger) + start_date = session.get_setting('start_date') def update_analysis_log(repos_id,status): # Log a repo's analysis status @@ -143,24 +145,72 @@ def update_analysis_log(repos_id,status): session.execute_sql(log_message) - repo_ids = [] + for repo_id in repo_ids: + session.logger.info(f"Generating sequence for repo {repo_id}") - - - for commit in commits: - repo_id = commit[1] - if repo_id not in repo_ids: - update_analysis_log(repo_id,'Data collection complete') + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') + + #Get the huge list of commits to process. + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + # Grab the parents of HEAD + + parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " + "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], + stdout=subprocess.PIPE, shell=True) + + parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) + + # If there are no commits in the range, we still get a blank entry in + # the set. 
Remove it, as it messes with the calculations + + if '' in parent_commits: + parent_commits.remove('') + + # Grab the existing commits from the database + + existing_commits = set() + + find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id + """).bindparams(repo_id=repo_id) + + #session.cfg.cursor.execute(find_existing, (repo[0], )) + + try: + for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): + existing_commits.add(commit['cmt_commit_hash']) + except: + session.log_activity('Info', 'list(cfg.cursor) returned an error') + + # Find missing commits and add them + + missing_commits = parent_commits - existing_commits - update_analysis_log(repo_id,'Beginning to trim commits') + session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") + + if len(missing_commits) > 0: + #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) + + #encode the repo_id with the commit. + commits_with_repo_tuple = [(commit,repo_id) for commit in list(missing_commits)] + #Get all missing commits into one large list to split into task pools + all_missing_commits.extend(commits_with_repo_tuple) + + # Find commits which are out of the analysis range + + trimmed_commits = existing_commits - parent_commits + + update_analysis_log(repo_id,'Data collection complete') - session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(commits)}") + update_analysis_log(repo_id,'Beginning to trim commits') - repo_ids.append(repo_id) + session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") - trim_commit(session,repo_id,commit[0]) - for repo_id in repo_ids: + + for commit in trimmed_commits: + trim_commit(session,repo_id,commit) + set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' """).bindparams(repo_id=repo_id) @@ -169,6 +219,8 @@ def update_analysis_log(repos_id,status): update_analysis_log(repo_id,'Commit trimming complete') update_analysis_log(repo_id,'Complete') + + @celery.task def facade_analysis_end_facade_task(): @@ -187,30 +239,88 @@ def facade_start_contrib_analysis_task(): #enable celery multithreading @celery.task -def analyze_commits_in_parallel(queue: list, multithreaded: bool)-> None: +def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: """Take a large list of commit data to analyze and store in the database. Meant to be run in parallel with other instances of this task. """ #create new session for celery thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) - - logger.info(f"Got to analysis!") session = FacadeSession(logger) - for count, commitTuple in enumerate(queue): + start_date = session.get_setting('start_date') + + for repo_id in repo_ids: + session.logger.info(f"Generating sequence for repo {repo_id}") + + + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') + + #Get the huge list of commits to process. 
+ repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + # Grab the parents of HEAD + + parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " + "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], + stdout=subprocess.PIPE, shell=True) + + parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) + + # If there are no commits in the range, we still get a blank entry in + # the set. Remove it, as it messes with the calculations + + if '' in parent_commits: + parent_commits.remove('') + + # Grab the existing commits from the database + + existing_commits = set() + + find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id + """).bindparams(repo_id=repo_id) + + #session.cfg.cursor.execute(find_existing, (repo[0], )) + + try: + for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): + existing_commits.add(commit['cmt_commit_hash']) + except: + session.log_activity('Info', 'list(cfg.cursor) returned an error') + + # Find missing commits and add them + + missing_commits = parent_commits - existing_commits + + session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") + + queue = [] + if len(missing_commits) > 0: + #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) + + #encode the repo_id with the commit. + commits = [commit for commit in list(missing_commits)] + #Get all missing commits into one large list to split into task pools + queue.extend(commits) + else: + return + + logger.info(f"Got to analysis!") - #Log progress when another quarter of the queue has been processed - if (count + 1) % int(len(queue) / 4) == 0: - logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") + for count, commitTuple in enumerate(queue): - query = session.query(Repo).filter(Repo.repo_id == commitTuple[1]) - repo = execute_session_query(query,'one') + #Log progress when another quarter of the queue has been processed + if (count + 1) % int(len(queue) / 4) == 0: + logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query,'one') - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - analyze_commit(session, commitTuple[1], repo_loc, commitTuple[0]) + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + + analyze_commit(session, repo_id, repo_loc, commitTuple) logger.info("Analysis complete") + return @celery.task def nuke_affiliations_facade_task(): @@ -241,7 +351,7 @@ def rebuild_unknown_affiliation_and_web_caches_facade_task(): -def generate_analysis_sequence(logger): +def generate_analysis_sequence(logger,repo_git_identifiers): """Run the analysis by looping over all active repos. For each repo, we retrieve the list of commits which lead to HEAD. If any are missing from the database, they are filled in. Then we check to see if any commits in the database are @@ -251,88 +361,34 @@ def generate_analysis_sequence(logger): is interrupted (possibly leading to partial data in the database for the commit being analyzed at the time) we can recover. 
""" + + + analysis_sequence = [] with FacadeSession(logger) as session: - repo_list = s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo """) + repo_list = s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo + WHERE repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) repos = session.fetchall_data_from_sql_text(repo_list) start_date = session.get_setting('start_date') repo_ids = [repo['repo_id'] for repo in repos] - analysis_sequence.append(facade_analysis_init_facade_task.si()) - - analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=grab_comitters)) + #determine amount of celery tasks to run at once in each grouped task load + concurrentTasks = int((-1 * (15/(len(repo_ids)+1))) + 15) + logger.info(f"Scheduling concurrent layers {concurrentTasks} tasks at a time.") - analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=trim_commits_facade_task)) - - all_missing_commits = [] - all_trimmed_commits = [] - - - for repo in repos: - session.logger.info(f"Generating sequence for repo {repo['repo_id']}") - - - #Get the huge list of commits to process. - repo_loc = (f"{session.repo_base_directory}{repo['repo_group_id']}/{repo['repo_path']}{repo['repo_name']}/.git") - # Grab the parents of HEAD - - parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " - "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], - stdout=subprocess.PIPE, shell=True) - - parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) - - # If there are no commits in the range, we still get a blank entry in - # the set. Remove it, as it messes with the calculations - - if '' in parent_commits: - parent_commits.remove('') - - # Grab the existing commits from the database - - existing_commits = set() - - find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id - """).bindparams(repo_id=repo['repo_id']) - - #session.cfg.cursor.execute(find_existing, (repo[0], )) - - try: - for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): - existing_commits.add(commit['cmt_commit_hash']) - except: - session.log_activity('Info', 'list(cfg.cursor) returned an error') - - # Find missing commits and add them - - missing_commits = parent_commits - existing_commits - - session.log_activity('Debug',f"Commits missing from repo {repo['repo_id']}: {len(missing_commits)}") - - if len(missing_commits) > 0: - #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) + analysis_sequence.append(facade_analysis_init_facade_task.si()) - #encode the repo_id with the commit. 
- commits_with_repo_tuple = [(commit,repo['repo_id']) for commit in list(missing_commits)] - #Get all missing commits into one large list to split into task pools - all_missing_commits.extend(commits_with_repo_tuple) - - # Find commits which are out of the analysis range + analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=grab_comitters,processes=concurrentTasks)) - trimmed_commits = existing_commits - parent_commits + analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=trim_commits_facade_task,processes=concurrentTasks)) - trimmed_commits_with_repo_tuple = [(commit,repo['repo_id']) for commit in list(trimmed_commits)] - all_trimmed_commits.extend(trimmed_commits_with_repo_tuple) - + analysis_sequence.append(create_grouped_task_load(True,dataList=repo_ids,task=analyze_commits_in_parallel,processes=concurrentTasks)) - if all_missing_commits: - analysis_sequence.append(create_grouped_task_load(True,dataList=all_missing_commits,task=analyze_commits_in_parallel)) + analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=trim_commits_post_analysis_facade_task,processes=concurrentTasks)) - if all_trimmed_commits: - analysis_sequence.append(create_grouped_task_load(dataList=all_trimmed_commits,task=trim_commits_post_analysis_facade_task)) analysis_sequence.append(facade_analysis_end_facade_task.si()) @@ -341,14 +397,15 @@ def generate_analysis_sequence(logger): -def generate_contributor_sequence(logger): +def generate_contributor_sequence(logger,repo_git_identifiers): contributor_sequence = [] all_repo_ids = [] with FacadeSession(logger) as session: #contributor_sequence.append(facade_start_contrib_analysis_task.si()) - query = s.sql.text("""SELECT repo_id FROM repo""") + query = s.sql.text("""SELECT repo_id FROM repo + WHERE repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) all_repos = session.fetchall_data_from_sql_text(query) #pdb.set_trace() @@ -364,9 +421,10 @@ def generate_contributor_sequence(logger): -def generate_facade_chain(logger): +def generate_facade_chain(logger,repo_git_identifiers): #raise NotImplemented + logger.info("Generating facade sequence") with FacadeSession(logger) as session: # Figure out what we need to do @@ -392,28 +450,28 @@ def generate_facade_chain(logger): facade_sequence = [] if not limited_run or (limited_run and delete_marked_repos): - git_repo_cleanup(session) + git_repo_cleanup(session,repo_git_identifiers) if not limited_run or (limited_run and clone_repos): - git_repo_initialize(session) + git_repo_initialize(session,repo_git_identifiers) if not limited_run or (limited_run and check_updates): - check_for_repo_updates(session) + check_for_repo_updates(session,repo_git_identifiers) if force_updates: - force_repo_updates(session)#facade_sequence.append(force_repo_updates_facade_task.si()) + force_repo_updates(session,repo_git_identifiers)#facade_sequence.append(force_repo_updates_facade_task.si()) if not limited_run or (limited_run and pull_repos): - git_repo_updates(session)#facade_sequence.append(git_repo_updates_facade_task.si()) + git_repo_updates(session,repo_git_identifiers)#facade_sequence.append(git_repo_updates_facade_task.si()) if force_analysis: - force_repo_analysis(session)#facade_sequence.append(force_repo_analysis_facade_task.si()) + force_repo_analysis(session,repo_git_identifiers)#facade_sequence.append(force_repo_analysis_facade_task.si()) #Generate commit analysis task order. 
- facade_sequence.extend(generate_analysis_sequence(logger)) + facade_sequence.extend(generate_analysis_sequence(logger,repo_git_identifiers)) #Generate contributor analysis task group. - facade_sequence.append(generate_contributor_sequence(logger)) + facade_sequence.append(generate_contributor_sequence(logger,repo_git_identifiers)) if nuke_stored_affiliations: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) @@ -428,6 +486,6 @@ def generate_facade_chain(logger): if not limited_run or (limited_run and rebuild_caches): facade_sequence.append(rebuild_unknown_affiliation_and_web_caches_facade_task.si().on_error(facade_error_handler.s()))#rebuild_unknown_affiliation_and_web_caches(session.cfg) - #logger.info(f"Facade sequence: {facade_sequence}") + logger.info(f"Facade sequence: {facade_sequence}") return chain(*facade_sequence) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py b/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py index e61f6c163c..7c61ac4fdc 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py @@ -37,8 +37,10 @@ import xlsxwriter import configparser import sqlalchemy as s +from augur.application.db.util import execute_session_query +from augur.application.db.models import * -def git_repo_cleanup(session): +def git_repo_cleanup(session,repo_git_identifiers): # Clean up any git repos that are pending deletion @@ -47,23 +49,24 @@ def git_repo_cleanup(session): session.log_activity('Info','Processing deletions') - query = s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo WHERE repo_status='Delete'""") + query = session.query(Repo).filter( + Repo.repo_git.in_(repo_git_identifiers),Repo.repo_status == "Delete")#s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo WHERE repo_status='Delete'""") - delete_repos = session.fetchall_data_from_sql_text(query) + delete_repos = execute_session_query(query,'all')#session.fetchall_data_from_sql_text(query) for row in delete_repos: # Remove the files on disk cmd = ("rm -rf %s%s/%s%s" - % (session.repo_base_directory,row['repo_group_id'],row['repo_path'],row['repo_name'])) + % (session.repo_base_directory,row.repo_group_id,row.repo_path,row.repo_name)) return_code = subprocess.Popen([cmd],shell=True).wait() # Remove the analysis data remove_commits = s.sql.text("""DELETE FROM commits WHERE repo_id=:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(remove_commits) optimize_table = s.sql.text("""OPTIMIZE TABLE commits""") @@ -72,21 +75,21 @@ def git_repo_cleanup(session): # Remove cached repo data remove_dm_repo_weekly = s.sql.text("""DELETE FROM dm_repo_weekly WHERE repo_id=:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(remove_dm_repo_weekly) optimize_table = s.sql.text("""OPTIMIZE TABLE dm_repo_weekly""") session.execute_sql(optimize_table) remove_dm_repo_monthly = s.sql.text("""DELETE FROM dm_repo_monthly WHERE repo_id=:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(remove_dm_repo_monthly) optimize_table = s.sql.text("""OPTIMIZE TABLE dm_repo_monthly""") session.execute_sql(optimize_table) remove_dm_repo_annual = s.sql.text("""DELETE FROM dm_repo_annual 
WHERE repo_id=:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(remove_dm_repo_annual) optimize_table = s.sql.text("""OPTIMIZE TABLE dm_repo_annual""") @@ -95,29 +98,29 @@ def git_repo_cleanup(session): # Set project to be recached if just removing a repo set_project_recache = s.sql.text("""UPDATE projects SET recache=TRUE - WHERE id=:repo_group_id""").bindparams(repo_group_id=row['repo_group_id']) + WHERE id=:repo_group_id""").bindparams(repo_group_id=row.repo_group_id) session.execute_sql(set_project_recache) # Remove the entry from the repos table query = s.sql.text("""DELETE FROM repo WHERE repo_id=:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(query) #log_activity('Verbose','Deleted repo %s' % row[0]) - #session.logger.debug(f"Deleted repo {row['repo_id']}") - session.log_activity('Verbose',f"Deleted repo {row['repo_id']}") - cleanup = '%s/%s%s' % (row['repo_group_id'],row['repo_path'],row['repo_name']) + #session.logger.debug(f"Deleted repo {row.repo_id}") + session.log_activity('Verbose',f"Deleted repo {row.repo_id}") + cleanup = '%s/%s%s' % (row.repo_group_id,row.repo_path,row.repo_name) # Remove any working commits remove_working_commits = s.sql.text("""DELETE FROM working_commits WHERE repos_id=:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(remove_working_commits) # Remove the repo from the logs remove_logs = s.sql.text("""DELETE FROM repos_fetch_log WHERE repos_id =:repo_id - """).bindparams(repo_id=row['repo_id']) + """).bindparams(repo_id=row.repo_id) session.execute_sql(remove_logs) @@ -136,7 +139,7 @@ def git_repo_cleanup(session): session.log_activity('Verbose',f"Attempted {cmd}") #update_repo_log(row[0],'Deleted') - session.update_repo_log(row['repo_id'],'Deleted') + session.update_repo_log(row.repo_id,'Deleted') # Clean up deleted projects diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index acbe0b6dae..aa23a18ad7 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -39,17 +39,19 @@ import sqlalchemy as s from .facade02utilitymethods import update_repo_log, trim_commit, store_working_author, trim_author from augur.application.db.models.augur_data import * -from augur.application.db.util import execute_session_query +from augur.application.db.util import execute_session_query, convert_orm_list_to_dict_list -def git_repo_initialize(session, repo_group_id=None): +def git_repo_initialize(session, repo_git_identifiers,repo_group_id=None): + # Select any new git repos so we can set up their locations and git clone # Select any new git repos so we can set up their locations and git clone new_repos = [] if repo_group_id is None: session.update_status('Fetching non-cloned repos') session.log_activity('Info','Fetching non-cloned repos') - query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git FROM repo WHERE repo_status LIKE 'New%'""") + query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git FROM repo WHERE repo_status LIKE 'New%' + AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) #Get data as a list of dicts @@ -188,7 +190,7 @@ def git_repo_initialize(session, repo_group_id=None): session.log_activity('Info', f"Fetching new repos (complete)") -def 
check_for_repo_updates(session): +def check_for_repo_updates(session,repo_git_identifiers): @@ -202,10 +204,13 @@ def check_for_repo_updates(session): get_initialized_repos = s.sql.text("""SELECT repo_id FROM repo WHERE repo_status NOT LIKE 'New%' AND repo_status != 'Delete' - AND repo_status != 'Analyze' AND repo_status != 'Empty'""") - + AND repo_status != 'Analyze' AND repo_status != 'Empty' + AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + repos = session.fetchall_data_from_sql_text(get_initialized_repos)#list(cfg.cursor) + + for repo in repos: # Figure out which repos have been updated within the waiting period @@ -242,7 +247,8 @@ def check_for_repo_updates(session): SELECT repo.ctid FROM repo LEFT JOIN repo a ON repo.repo_group_id=a.repo_group_id AND repo.repo_status='Update' AND repo.repo_status != 'Analyze' - AND repo.repo_status != 'Empty')""") + AND repo.repo_status != 'Empty') + AND repo.repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) # ("UPDATE repos r LEFT JOIN repos s ON r.projects_id=s.projects_id " # "SET r.status='Update' WHERE s.status='Update' AND " @@ -253,7 +259,7 @@ def check_for_repo_updates(session): session.log_activity('Info','Checking repos to update (complete)') -def force_repo_updates(session): +def force_repo_updates(session,repo_git_identifiers): # Set the status of all non-new repos to "Update". @@ -261,12 +267,13 @@ def force_repo_updates(session): session.log_activity('Info','Forcing repos to update') get_repo_ids = s.sql.text("""UPDATE repo SET repo_status='Update' WHERE repo_status - NOT LIKE 'New%' AND repo_status!='Delete' AND repo_status !='Empty'""") + NOT LIKE 'New%' AND repo_status!='Delete' AND repo_status !='Empty' + AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) session.execute_sql(get_repo_ids) session.log_activity('Info','Forcing repos to update (complete)') -def force_repo_analysis(session): +def force_repo_analysis(session,repo_git_identifiers): # Set the status of all non-new repos to "Analyze". 
@@ -274,25 +281,28 @@ def check_for_repo_updates(session):
     session.log_activity('Info','Forcing repos to be analyzed')
 
     set_to_analyze = s.sql.text("""UPDATE repo SET repo_status='Analyze' WHERE repo_status
-        NOT LIKE 'New%' AND repo_status!='Delete' AND repo_status != 'Empty'""")
+        NOT LIKE 'New%' AND repo_status!='Delete' AND repo_status != 'Empty'
+        AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers))
 
     session.execute_sql(set_to_analyze)
 
     session.log_activity('Info','Forcing repos to be analyzed (complete)')
 
-def git_repo_updates(session):
+def git_repo_updates(session,repo_git_identifiers):
 
     # Update existing repos
 
     session.update_status('Updating repos')
     session.log_activity('Info','Updating existing repos')
 
-    query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git,repo_name,repo_path FROM repo WHERE
-        repo_status='Update'""")
-
+    #query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git,repo_name,repo_path FROM repo WHERE
+    # repo_status='Update'""")
+    query = session.query(Repo).filter(
+        Repo.repo_git.in_(repo_git_identifiers),Repo.repo_status == 'Update')
+    result = execute_session_query(query, 'all')
 
-    existing_repos = session.fetchall_data_from_sql_text(query)#list(cfg.cursor)
+    existing_repos = convert_orm_list_to_dict_list(result)#session.fetchall_data_from_sql_text(query)#list(cfg.cursor)
 
     for row in existing_repos:
         session.log_activity('Verbose',f"Attempting to update {row['repo_git']}")#['git'])
diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py
index f47d800b82..dfc5cee9e3 100644
--- a/augur/tasks/github/detect_move/tasks.py
+++ b/augur/tasks/github/detect_move/tasks.py
@@ -14,7 +14,10 @@ def detect_github_repo_move(repo_git_identifiers : [str]) -> None:
     #Ping each repo with the given repo_git to make sure
     #that they are still in place.
for repo_git in repo_git_identifiers: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - logger.info(f"Pinging repo: {repo_git}") - ping_github_for_repo_move(session, repo) \ No newline at end of file + try: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + logger.info(f"Pinging repo: {repo_git}") + ping_github_for_repo_move(session, repo) + except Exception as e: + logger.error(f"Could not check repo source for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") \ No newline at end of file diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index cb8c175e91..5459788dc1 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -21,27 +21,31 @@ def collect_events(repo_git_identifiers: [str]): for repo_git in repo_git_identifiers: - # define GithubTaskSession to handle insertions, and store oauth keys - with GithubTaskSession(logger) as session: - - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_id = repo_obj.repo_id - - owner, repo = get_owner_repo(repo_git) - - logger.info(f"Collecting Github events for {owner}/{repo}") - - url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" - - event_data = retrieve_all_event_data(repo_git, logger) - - if event_data: - - process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) - - else: - logger.info(f"{owner}/{repo} has no events") + + try: + # define GithubTaskSession to handle insertions, and store oauth keys + with GithubTaskSession(logger) as session: + + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id + + owner, repo = get_owner_repo(repo_git) + + logger.info(f"Collecting Github events for {owner}/{repo}") + + url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" + + event_data = retrieve_all_event_data(repo_git, logger) + + if event_data: + + process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no events") + except Exception as e: + logger.error(f"Could not collect events for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_event_data(repo_git: str, logger): diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index 83dbbb02bb..f33459795e 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -21,26 +21,29 @@ def collect_issues(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_issues.__name__) - - for repo_git in repo_git_identifiers: - owner, repo = get_owner_repo(repo_git) - # define GithubTaskSession to handle insertions, and store oauth keys - with GithubTaskSession(logger) as session: - - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_id = repo_obj.repo_id + for repo_git in repo_git_identifiers: + try: + owner, repo = get_owner_repo(repo_git) + + # define GithubTaskSession to handle insertions, and store oauth keys + with GithubTaskSession(logger) as session: + + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id + + 
+ issue_data = retrieve_all_issue_data(repo_git, logger) + + if issue_data: - - issue_data = retrieve_all_issue_data(repo_git, logger) - - if issue_data: - - process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) - - else: - logger.info(f"{owner}/{repo} has no issues") + process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no issues") + except Exception as e: + logger.error(f"Could not collect issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_issue_data(repo_git, logger) -> None: diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index 89ea3e1c6e..f676d28c60 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -22,20 +22,23 @@ def collect_github_messages(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_github_messages.__name__) for repo_git in repo_git_identifiers: - with GithubTaskSession(logger, engine) as session: - - repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id - - owner, repo = get_owner_repo(repo_git) - message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) - - if message_data: - - process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) - - else: - logger.info(f"{owner}/{repo} has no messages") + try: + with GithubTaskSession(logger, engine) as session: + + repo_id = session.query(Repo).filter( + Repo.repo_git == repo_git).one().repo_id + + owner, repo = get_owner_repo(repo_git) + message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) + + if message_data: + + process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no messages") + except Exception as e: + logger.error(f"Could not collect github messages for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 2b3383bcc7..961a2ad4cd 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -1,6 +1,6 @@ import time import logging - +import traceback from augur.tasks.github.pull_requests.core import extract_data_from_pr_list from augur.tasks.init.celery_app import celery_app as celery, engine @@ -22,18 +22,22 @@ def collect_pull_requests(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_pull_requests.__name__) for repo_git in repo_git_identifiers: - with GithubTaskSession(logger, engine) as session: - - repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id - - owner, repo = get_owner_repo(repo_git) - pr_data = retrieve_all_pr_data(repo_git, logger) - - if pr_data: - process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no pull requests") + try: + + with GithubTaskSession(logger, engine) as session: + + repo_id = session.query(Repo).filter( + Repo.repo_git == repo_git).one().repo_id + + owner, repo = get_owner_repo(repo_git) + pr_data = retrieve_all_pr_data(repo_git, logger) + + if pr_data: + process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) + else: + logger.info(f"{owner}/{repo} has no pull 
requests") + except Exception as e: + logger.error(f"Could not collect pull requests for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") # TODO: Rename pull_request_reviewers table to pull_request_requested_reviewers diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 093a899a02..4267264ebf 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -48,7 +48,7 @@ def get_release_inf(session, repo_id, release, tag_only): name = release['target']['tagger']['name'] else: name = "" - if 'email' in release['target']['tagger']: + if 'email' in release['target']['tagger'] and release['target']['tagger']['email']: email = '_' + release['target']['tagger']['email'] else: email = "" diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 457c913184..8df5bea1e1 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -31,7 +31,8 @@ 'augur.tasks.github.pull_requests.files_model.tasks', 'augur.tasks.github.pull_requests.commits_model.tasks'] -git_tasks = ['augur.tasks.git.facade_tasks'] +git_tasks = ['augur.tasks.git.facade_tasks', + 'augur.tasks.git.dependency_tasks.tasks'] data_analysis_tasks = ['augur.tasks.data_analysis.message_insights.tasks', 'augur.tasks.data_analysis.clustering_worker.tasks', diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 561b3005ce..b5f9ccc667 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -5,7 +5,8 @@ import json import os from enum import Enum - +import math +import numpy as np #from celery.result import AsyncResult from celery import signature from celery import group, chain, chord, signature @@ -19,6 +20,7 @@ from augur.tasks.github.repo_info.tasks import collect_repo_info from augur.tasks.github.pull_requests.files_model.tasks import process_pull_request_files from augur.tasks.github.pull_requests.commits_model.tasks import process_pull_request_commits +from augur.tasks.git.dependency_tasks.tasks import process_dependency_metrics from augur.tasks.git.facade_tasks import * from augur.tasks.db.refresh_materialized_views import * # from augur.tasks.data_analysis import * @@ -58,6 +60,9 @@ def repo_collect_phase(): issue_dependent_tasks = [] #repo_info should run in a group repo_info_tasks = [] + + np_clustered_array = [] + #A chain is needed for each repo. with DatabaseSession(logger) as session: query = session.query(Repo) @@ -65,35 +70,77 @@ def repo_collect_phase(): all_repo_git_identifiers = [repo.repo_git for repo in repos] + #Cluster each repo in groups of 5. + np_clustered_array = np.array_split(all_repo_git_identifiers,math.ceil(len(all_repo_git_identifiers)/50)) + first_pass = np_clustered_array.pop(0).tolist() + + logger.info(f"Scheduling groups of {len(first_pass)}") #Pool the tasks for collecting repo info. - repo_info_tasks = create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_repo_info).tasks + repo_info_tasks = create_grouped_task_load(dataList=first_pass, task=collect_repo_info).tasks #pool the repo collection jobs that should be ran first and have deps. 
primary_repo_jobs = group( - *create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_issues).tasks, - *create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_pull_requests).tasks + *create_grouped_task_load(dataList=first_pass, task=collect_issues).tasks, + *create_grouped_task_load(dataList=first_pass, task=collect_pull_requests).tasks ) secondary_repo_jobs = group( - *create_grouped_task_load(dataList=all_repo_git_identifiers, task=collect_events).tasks, - *create_grouped_task_load(dataList=all_repo_git_identifiers,task=collect_github_messages).tasks, - *create_grouped_task_load(dataList=all_repo_git_identifiers, task=process_pull_request_files).tasks, - *create_grouped_task_load(dataList=all_repo_git_identifiers, task=process_pull_request_commits).tasks + *create_grouped_task_load(dataList=first_pass, task=collect_events).tasks, + *create_grouped_task_load(dataList=first_pass,task=collect_github_messages).tasks, + *create_grouped_task_load(dataList=first_pass, task=process_pull_request_files).tasks, + *create_grouped_task_load(dataList=first_pass, task=process_pull_request_commits).tasks ) repo_task_group = group( *repo_info_tasks, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), - generate_facade_chain(logger), + generate_facade_chain(logger,first_pass), + *create_grouped_task_load(dataList=first_pass,task=process_dependency_metrics).tasks, collect_releases.si() ) result = chain(repo_task_group, refresh_materialized_views.si()).apply_async() with allow_join_result(): - return result.get() + result.wait() + + if len(np_clustered_array) == 0: + return + + + for cluster in np_clustered_array: + additionalPass = cluster.tolist() + #Pool the tasks for collecting repo info. + repo_info_tasks = create_grouped_task_load(dataList=additionalPass, task=collect_repo_info).tasks + + #pool the repo collection jobs that should be ran first and have deps. + primary_repo_jobs = group( + *create_grouped_task_load(dataList=additionalPass, task=collect_issues).tasks, + *create_grouped_task_load(dataList=additionalPass, task=collect_pull_requests).tasks + ) + + secondary_repo_jobs = group( + *create_grouped_task_load(dataList=additionalPass, task=collect_events).tasks, + *create_grouped_task_load(dataList=additionalPass,task=collect_github_messages).tasks, + *create_grouped_task_load(dataList=additionalPass, task=process_pull_request_files).tasks, + *create_grouped_task_load(dataList=additionalPass, task=process_pull_request_commits).tasks + ) + + repo_task_group = group( + *repo_info_tasks, + chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), + generate_facade_chain(logger,additionalPass), + *create_grouped_task_load(dataList=additionalPass,task=process_dependency_metrics).tasks + ) + + result = chain(repo_task_group, refresh_materialized_views.si()).apply_async() + + with allow_join_result(): + result.wait() + + return DEFINED_COLLECTION_PHASES = [prelim_phase, repo_collect_phase] diff --git a/result.txt b/result.txt new file mode 100644 index 0000000000..554c3000da --- /dev/null +++ b/result.txt @@ -0,0 +1,1598 @@ + . augur.tasks.github.facade_github.tasks.insert_facade_contributors + . 
augur.tasks.github.facade_github.tasks.insert_facade_contributors +[2023-01-12 12:24:13,654: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[ffb189d0-7f2f-4764-b483-f49e5a72df3b] received +[2023-01-12 12:24:13,678: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[63dcd9f8-f3ef-41f5-ad82-6d71247779eb] received +[2023-01-12 12:24:13,678: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[c34794a2-bb3a-4b56-9947-e27a2322f4cf] received +[2023-01-12 12:24:13,679: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[ca42a9a2-3811-4b42-9771-edf03a1bf89a] received +[2023-01-12 12:24:13,679: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[b9272f70-80e4-4bc6-bf00-287a126322ff] received +[2023-01-12 12:24:13,679: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[5b17661b-193a-437e-abe8-db972bef6e26] received +[2023-01-12 12:24:13,680: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[5bfc2ada-0703-4b20-8438-770d0883a1fa] received +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 24441 +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 1 +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02164fd91b52e1cfab84702cea9a1286b28eb792 +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/02164fd91b52e1cfab84702cea9a1286b28eb792 +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 24442 +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 011413603c412d1b48540d2119f7c1f10baea9cc +2023-01-12 12:24:13 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/011413603c412d1b48540d2119f7c1f10baea9cc +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 25445 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 00ef07a4cea89235da6d40308eae813eab0cddfc +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/00ef07a4cea89235da6d40308eae813eab0cddfc +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0005db68a59e3052c5bedb1c0b8862654e9707e3 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0005db68a59e3052c5bedb1c0b8862654e9707e3 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 25430 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0a63a98d33d71f33380ccba6c5cc3391bf57afd9 +2023-01-12 12:24:14 blueberry 
insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/SociallyCompute/update-test/commits/0a63a98d33d71f33380ccba6c5cc3391bf57afd9 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 25450 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: aduggal@redhat.com +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01019154-cb00-0000-0000-000000000000'), 'cntrb_login': 'aakankshaduggal', 'cntrb_created_at': '2017-03-09T12:08:00Z', 'cntrb_email': 'aduggal@redhat.com', 'cntrb_company': '@redhat-et', 'cntrb_location': 'Boston, MA, USA', 'cntrb_canonical': 'aduggal@redhat.com', 'gh_user_id': 26301643, 'gh_login': 'aakankshaduggal', 'gh_url': 'https://api.github.com/users/aakankshaduggal', 'gh_html_url': 'https://github.com/aakankshaduggal', 'gh_node_id': 'MDQ6VXNlcjI2MzAxNjQz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/26301643?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/aakankshaduggal/followers', 'gh_following_url': 'https://api.github.com/users/aakankshaduggal/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/aakankshaduggal/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/aakankshaduggal/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/aakankshaduggal/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/aakankshaduggal/orgs', 'gh_repos_url': 'https://api.github.com/users/aakankshaduggal/repos', 'gh_events_url': 'https://api.github.com/users/aakankshaduggal/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/aakankshaduggal/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T11:22:57Z', 'cntrb_full_name': 'Aakanksha Duggal'} +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0026b03d590b6ef6ada5260c099960d3eea23992 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO cntrb_id 01019154-cb00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Creating alias for email: aduggal@redhat.com +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/0026b03d590b6ef6ada5260c099960d3eea23992 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Updating now resolved email aduggal@redhat.com +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 059f3e5d9f578a5c04e0498e0ea1305dd58a9e5b +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/059f3e5d9f578a5c04e0498e0ea1305dd58a9e5b +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Beginning process to insert contributors from facade commits for repo w entry info: 25452 +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: goern@redhat.com +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010003f8-eb00-0000-0000-000000000000'), 
'cntrb_login': 'goern', 'cntrb_created_at': '2010-04-30T08:09:26Z', 'cntrb_email': 'goern@redhat.com', 'cntrb_company': '@redhat-et', 'cntrb_location': 'Bonn, Germany', 'cntrb_canonical': 'goern@redhat.com', 'gh_user_id': 260331, 'gh_login': 'goern', 'gh_url': 'https://api.github.com/users/goern', 'gh_html_url': 'https://github.com/goern', 'gh_node_id': 'MDQ6VXNlcjI2MDMzMQ==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/260331?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/goern/followers', 'gh_following_url': 'https://api.github.com/users/goern/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/goern/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/goern/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/goern/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/goern/orgs', 'gh_repos_url': 'https://api.github.com/users/goern/repos', 'gh_events_url': 'https://api.github.com/users/goern/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/goern/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T11:11:52Z', 'cntrb_full_name': 'Christoph Görn'} +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO cntrb_id 010003f8-eb00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Creating alias for email: goern@redhat.com +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Updating now resolved email goern@redhat.com +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 01309f0d2bbec00419372dafd063fe5808d757ad +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/01309f0d2bbec00419372dafd063fe5808d757ad +2023-01-12 12:24:14 blueberry insert_facade_contributors[59440] INFO Done with inserting and updating facade contributors +[2023-01-12 12:24:14,998: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[5bfc2ada-0703-4b20-8438-770d0883a1fa] succeeded in 0.223045650000131s: None +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: sduenas@bitergia.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000cb7-4800-0000-0000-000000000000'), 'cntrb_login': 'sduenas', 'cntrb_created_at': '2011-06-06T18:31:00Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'sduenas@bitergia.com', 'gh_user_id': 833352, 'gh_login': 'sduenas', 'gh_url': 'https://api.github.com/users/sduenas', 'gh_html_url': 'https://github.com/sduenas', 'gh_node_id': 'MDQ6VXNlcjgzMzM1Mg==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/833352?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sduenas/followers', 'gh_following_url': 'https://api.github.com/users/sduenas/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sduenas/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sduenas/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sduenas/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sduenas/orgs', 
'gh_repos_url': 'https://api.github.com/users/sduenas/repos', 'gh_events_url': 'https://api.github.com/users/sduenas/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sduenas/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-04T08:57:02Z', 'cntrb_full_name': 'Santiago Dueñas'} +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000cb7-4800-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: sduenas@bitergia.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email sduenas@bitergia.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02ab313134a129866887cf8cb94e0f645dfc2807 +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/02ab313134a129866887cf8cb94e0f645dfc2807 +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: michaelwoodruffdev@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102b055-e200-0000-0000-000000000000'), 'cntrb_login': 'michaelwoodruffdev', 'cntrb_created_at': '2018-11-16T22:40:34Z', 'cntrb_email': 'michaelwoodruffdev@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'michaelwoodruffdev@gmail.com', 'gh_user_id': 45110754, 'gh_login': 'michaelwoodruffdev', 'gh_url': 'https://api.github.com/users/michaelwoodruffdev', 'gh_html_url': 'https://github.com/michaelwoodruffdev', 'gh_node_id': 'MDQ6VXNlcjQ1MTEwNzU0', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/45110754?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/michaelwoodruffdev/followers', 'gh_following_url': 'https://api.github.com/users/michaelwoodruffdev/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/michaelwoodruffdev/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/michaelwoodruffdev/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/michaelwoodruffdev/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/michaelwoodruffdev/orgs', 'gh_repos_url': 'https://api.github.com/users/michaelwoodruffdev/repos', 'gh_events_url': 'https://api.github.com/users/michaelwoodruffdev/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/michaelwoodruffdev/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-12-02T23:29:49Z', 'cntrb_full_name': 'Michael Woodruff'} +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102b055-e200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: michaelwoodruffdev@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email michaelwoodruffdev@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0018b38ff01dfe5f32689d457b7dd7e0ef1e27fb +2023-01-12 12:24:15 
blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0018b38ff01dfe5f32689d457b7dd7e0ef1e27fb +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: s@goggins.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010005cb-c700-0000-0000-000000000000'), 'cntrb_login': 'sgoggins', 'cntrb_created_at': '2010-08-29T16:25:48Z', 'cntrb_email': 's@goggins.com', 'cntrb_company': 'University of Missouri & Linux Foundation CHAOSS Working Group', 'cntrb_location': 'Columbia, MO', 'cntrb_canonical': 's@goggins.com', 'gh_user_id': 379847, 'gh_login': 'sgoggins', 'gh_url': 'https://api.github.com/users/sgoggins', 'gh_html_url': 'https://github.com/sgoggins', 'gh_node_id': 'MDQ6VXNlcjM3OTg0Nw==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/379847?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sgoggins/followers', 'gh_following_url': 'https://api.github.com/users/sgoggins/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sgoggins/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sgoggins/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sgoggins/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sgoggins/orgs', 'gh_repos_url': 'https://api.github.com/users/sgoggins/repos', 'gh_events_url': 'https://api.github.com/users/sgoggins/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sgoggins/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T18:26:13Z', 'cntrb_full_name': 'Sean P. Goggins'} +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 010005cb-c700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: s@goggins.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email s@goggins.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: dmorenolumb@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01008d21-d000-0000-0000-000000000000'), 'cntrb_login': 'dlumbrer', 'cntrb_created_at': '2014-10-15T09:13:12Z', 'cntrb_email': 'dmorenolumb@gmail.com', 'cntrb_company': None, 'cntrb_location': 'Spain', 'cntrb_canonical': 'dmorenolumb@gmail.com', 'gh_user_id': 9249232, 'gh_login': 'dlumbrer', 'gh_url': 'https://api.github.com/users/dlumbrer', 'gh_html_url': 'https://github.com/dlumbrer', 'gh_node_id': 'MDQ6VXNlcjkyNDkyMzI=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/9249232?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/dlumbrer/followers', 'gh_following_url': 'https://api.github.com/users/dlumbrer/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/dlumbrer/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/dlumbrer/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/dlumbrer/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/dlumbrer/orgs', 'gh_repos_url': 'https://api.github.com/users/dlumbrer/repos', 'gh_events_url': 
'https://api.github.com/users/dlumbrer/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/dlumbrer/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-20T20:56:18Z', 'cntrb_full_name': 'David Moreno Lumbreras'} +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 01008d21-d000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: dmorenolumb@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email dmorenolumb@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 029c5baab265f34aae5f1d673b2cb4daf6b391d7 +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/029c5baab265f34aae5f1d673b2cb4daf6b391d7 +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 54685329+isabelizimm@users.noreply.github.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0103426e-9100-0000-0000-000000000000'), 'cntrb_login': 'isabelizimm', 'cntrb_created_at': '2019-08-29T19:10:54Z', 'cntrb_email': None, 'cntrb_company': '@rstudio', 'cntrb_location': 'Florida, USA', 'cntrb_canonical': '54685329+isabelizimm@users.noreply.github.com', 'gh_user_id': 54685329, 'gh_login': 'isabelizimm', 'gh_url': 'https://api.github.com/users/isabelizimm', 'gh_html_url': 'https://github.com/isabelizimm', 'gh_node_id': 'MDQ6VXNlcjU0Njg1MzI5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/54685329?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/isabelizimm/followers', 'gh_following_url': 'https://api.github.com/users/isabelizimm/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/isabelizimm/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/isabelizimm/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/isabelizimm/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/isabelizimm/orgs', 'gh_repos_url': 'https://api.github.com/users/isabelizimm/repos', 'gh_events_url': 'https://api.github.com/users/isabelizimm/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/isabelizimm/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T17:50:04Z', 'cntrb_full_name': 'Isabel Zimmerman'} +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 0103426e-9100-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 54685329+isabelizimm@users.noreply.github.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Done with inserting and updating facade contributors +[2023-01-12 12:24:15,751: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[b9272f70-80e4-4bc6-bf00-287a126322ff] succeeded in 1.4141357480002625s: None +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 
54685329+isabelizimm@users.noreply.github.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0a142fd82fea2edd311cbb50348f744d93d69528 +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/0a142fd82fea2edd311cbb50348f744d93d69528 +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: HumairAK@users.noreply.github.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100a665-8700-0000-0000-000000000000'), 'cntrb_login': 'HumairAK', 'cntrb_created_at': '2015-02-08T05:31:38Z', 'cntrb_email': None, 'cntrb_company': 'Red Hat', 'cntrb_location': None, 'cntrb_canonical': 'HumairAK@users.noreply.github.com', 'gh_user_id': 10904967, 'gh_login': 'HumairAK', 'gh_url': 'https://api.github.com/users/HumairAK', 'gh_html_url': 'https://github.com/HumairAK', 'gh_node_id': 'MDQ6VXNlcjEwOTA0OTY3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/10904967?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/HumairAK/followers', 'gh_following_url': 'https://api.github.com/users/HumairAK/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/HumairAK/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/HumairAK/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/HumairAK/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/HumairAK/orgs', 'gh_repos_url': 'https://api.github.com/users/HumairAK/repos', 'gh_events_url': 'https://api.github.com/users/HumairAK/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/HumairAK/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T17:04:42Z', 'cntrb_full_name': 'Humair Khan'} +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100a665-8700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: HumairAK@users.noreply.github.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email HumairAK@users.noreply.github.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02ca9908e9ea209f6e77d1660026450da9582eec +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/02ca9908e9ea209f6e77d1660026450da9582eec +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: animuz111@gmail.com +2023-01-12 12:24:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101f002-df00-0000-0000-000000000000'), 'cntrb_login': 'animeshk08', 'cntrb_created_at': '2017-10-04T06:38:11Z', 'cntrb_email': 'animuz111@gmail.com', 'cntrb_company': '@chaoss @kubernetes', 'cntrb_location': 'Bengaluru, India', 'cntrb_canonical': 'animuz111@gmail.com', 'gh_user_id': 32506591, 'gh_login': 'animeshk08', 'gh_url': 'https://api.github.com/users/animeshk08', 'gh_html_url': 'https://github.com/animeshk08', 'gh_node_id': 'MDQ6VXNlcjMyNTA2NTkx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/32506591?v=4', 
'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/animeshk08/followers', 'gh_following_url': 'https://api.github.com/users/animeshk08/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/animeshk08/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/animeshk08/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/animeshk08/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/animeshk08/orgs', 'gh_repos_url': 'https://api.github.com/users/animeshk08/repos', 'gh_events_url': 'https://api.github.com/users/animeshk08/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/animeshk08/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-21T12:39:21Z', 'cntrb_full_name': 'Animesh Kumar'} +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101f002-df00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: animuz111@gmail.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email animuz111@gmail.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 09e240b184df50a61557517836d4bcfba911e358 +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/09e240b184df50a61557517836d4bcfba911e358 +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: s@goggins.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010005cb-c700-0000-0000-000000000000'), 'cntrb_login': 'sgoggins', 'cntrb_created_at': '2010-08-29T16:25:48Z', 'cntrb_email': 's@goggins.com', 'cntrb_company': 'University of Missouri & Linux Foundation CHAOSS Working Group', 'cntrb_location': 'Columbia, MO', 'cntrb_canonical': 's@goggins.com', 'gh_user_id': 379847, 'gh_login': 'sgoggins', 'gh_url': 'https://api.github.com/users/sgoggins', 'gh_html_url': 'https://github.com/sgoggins', 'gh_node_id': 'MDQ6VXNlcjM3OTg0Nw==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/379847?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sgoggins/followers', 'gh_following_url': 'https://api.github.com/users/sgoggins/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sgoggins/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sgoggins/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sgoggins/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sgoggins/orgs', 'gh_repos_url': 'https://api.github.com/users/sgoggins/repos', 'gh_events_url': 'https://api.github.com/users/sgoggins/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sgoggins/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T18:26:13Z', 'cntrb_full_name': 'Sean P. 
Goggins'} +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 010005cb-c700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: s@goggins.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email s@goggins.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0028c1a22755d86641331fcc25cf11ca184ee300 +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0028c1a22755d86641331fcc25cf11ca184ee300 +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: lcanas@bitergia.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010004fc-3f00-0000-0000-000000000000'), 'cntrb_login': 'sanacl', 'cntrb_created_at': '2010-07-08T18:53:20Z', 'cntrb_email': 'lcanas@bitergia.com', 'cntrb_company': 'Bitergia', 'cntrb_location': 'Alcorcón', 'cntrb_canonical': 'lcanas@bitergia.com', 'gh_user_id': 326719, 'gh_login': 'sanacl', 'gh_url': 'https://api.github.com/users/sanacl', 'gh_html_url': 'https://github.com/sanacl', 'gh_node_id': 'MDQ6VXNlcjMyNjcxOQ==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/326719?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sanacl/followers', 'gh_following_url': 'https://api.github.com/users/sanacl/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sanacl/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sanacl/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sanacl/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sanacl/orgs', 'gh_repos_url': 'https://api.github.com/users/sanacl/repos', 'gh_events_url': 'https://api.github.com/users/sanacl/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sanacl/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-12T16:34:16Z', 'cntrb_full_name': 'Luis Cañas-Díaz'} +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 010004fc-3f00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: lcanas@bitergia.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email lcanas@bitergia.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 05d9a0ada9262dc4e6926f8ca68b2f9a96963ae2 +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/05d9a0ada9262dc4e6926f8ca68b2f9a96963ae2 +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: ochatter@redhat.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101eeec-0600-0000-0000-000000000000'), 'cntrb_login': 'oindrillac', 'cntrb_created_at': '2017-10-01T15:06:47Z', 'cntrb_email': 'ochatter@redhat.com', 'cntrb_company': '@AICoE 
and @operate-first at Red Hat', 'cntrb_location': 'Boston ', 'cntrb_canonical': 'ochatter@redhat.com', 'gh_user_id': 32435206, 'gh_login': 'oindrillac', 'gh_url': 'https://api.github.com/users/oindrillac', 'gh_html_url': 'https://github.com/oindrillac', 'gh_node_id': 'MDQ6VXNlcjMyNDM1MjA2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/32435206?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/oindrillac/followers', 'gh_following_url': 'https://api.github.com/users/oindrillac/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/oindrillac/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/oindrillac/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/oindrillac/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/oindrillac/orgs', 'gh_repos_url': 'https://api.github.com/users/oindrillac/repos', 'gh_events_url': 'https://api.github.com/users/oindrillac/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/oindrillac/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T07:31:27Z', 'cntrb_full_name': 'Oindrilla Chatterjee'} +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101eeec-0600-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: ochatter@redhat.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email ochatter@redhat.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0f27e1c7488a9f12f0bd0283af06cc9b45d3306a +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/0f27e1c7488a9f12f0bd0283af06cc9b45d3306a +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: hild@b4mad.net +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000278-6000-0000-0000-000000000000'), 'cntrb_login': 'durandom', 'cntrb_created_at': '2009-12-04T13:09:02Z', 'cntrb_email': 'mhild@redhat.com', 'cntrb_company': 'Red Hat', 'cntrb_location': 'Kiel, Germany', 'cntrb_canonical': 'mhild@redhat.com', 'gh_user_id': 161888, 'gh_login': 'durandom', 'gh_url': 'https://api.github.com/users/durandom', 'gh_html_url': 'https://github.com/durandom', 'gh_node_id': 'MDQ6VXNlcjE2MTg4OA==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/161888?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/durandom/followers', 'gh_following_url': 'https://api.github.com/users/durandom/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/durandom/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/durandom/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/durandom/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/durandom/orgs', 'gh_repos_url': 'https://api.github.com/users/durandom/repos', 'gh_events_url': 'https://api.github.com/users/durandom/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/durandom/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-17T17:09:21Z', 
'cntrb_full_name': 'Marcel Hild'} +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000278-6000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: hild@b4mad.net +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email hild@b4mad.net +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: valcos@bitergia.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01006369-7b00-0000-0000-000000000000'), 'cntrb_login': 'valeriocos', 'cntrb_created_at': '2014-01-27T14:30:30Z', 'cntrb_email': 'valerio.cosentino@gmail.com', 'cntrb_company': 'Eventbrite', 'cntrb_location': 'Madrid, Spain', 'cntrb_canonical': 'valerio.cosentino@gmail.com', 'gh_user_id': 6515067, 'gh_login': 'valeriocos', 'gh_url': 'https://api.github.com/users/valeriocos', 'gh_html_url': 'https://github.com/valeriocos', 'gh_node_id': 'MDQ6VXNlcjY1MTUwNjc=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/6515067?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/valeriocos/followers', 'gh_following_url': 'https://api.github.com/users/valeriocos/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/valeriocos/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/valeriocos/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/valeriocos/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/valeriocos/orgs', 'gh_repos_url': 'https://api.github.com/users/valeriocos/repos', 'gh_events_url': 'https://api.github.com/users/valeriocos/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/valeriocos/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-22T07:55:51Z', 'cntrb_full_name': 'valerio'} +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 01006369-7b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: valcos@bitergia.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email valcos@bitergia.com +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:24:16 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Alvaro del Castillo', 'hash': '05d9a0ada9262dc4e6926f8ca68b2f9a96963ae2', 'email_raw': 'acs@bitergia.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0d11b347a2ff527ccce256bcd422f9f1e62a4d07 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/0d11b347a2ff527ccce256bcd422f9f1e62a4d07 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: krabs@tilde.team +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010177f7-2400-0000-0000-000000000000'), 
'cntrb_login': 'IsaacMilarky', 'cntrb_created_at': '2016-12-18T19:35:40Z', 'cntrb_email': 'imilarsky@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'imilarsky@gmail.com', 'gh_user_id': 24639268, 'gh_login': 'IsaacMilarky', 'gh_url': 'https://api.github.com/users/IsaacMilarky', 'gh_html_url': 'https://github.com/IsaacMilarky', 'gh_node_id': 'MDQ6VXNlcjI0NjM5MjY4', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/24639268?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/IsaacMilarky/followers', 'gh_following_url': 'https://api.github.com/users/IsaacMilarky/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/IsaacMilarky/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/IsaacMilarky/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/IsaacMilarky/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/IsaacMilarky/orgs', 'gh_repos_url': 'https://api.github.com/users/IsaacMilarky/repos', 'gh_events_url': 'https://api.github.com/users/IsaacMilarky/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/IsaacMilarky/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-29T17:34:46Z', 'cntrb_full_name': 'Isaac Milarsky'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO cntrb_id 010177f7-2400-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Creating alias for email: krabs@tilde.team +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: mhild@redhat.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000278-6000-0000-0000-000000000000'), 'cntrb_login': 'durandom', 'cntrb_created_at': '2009-12-04T13:09:02Z', 'cntrb_email': 'mhild@redhat.com', 'cntrb_company': 'Red Hat', 'cntrb_location': 'Kiel, Germany', 'cntrb_canonical': 'mhild@redhat.com', 'gh_user_id': 161888, 'gh_login': 'durandom', 'gh_url': 'https://api.github.com/users/durandom', 'gh_html_url': 'https://github.com/durandom', 'gh_node_id': 'MDQ6VXNlcjE2MTg4OA==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/161888?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/durandom/followers', 'gh_following_url': 'https://api.github.com/users/durandom/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/durandom/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/durandom/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/durandom/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/durandom/orgs', 'gh_repos_url': 'https://api.github.com/users/durandom/repos', 'gh_events_url': 'https://api.github.com/users/durandom/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/durandom/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-17T17:09:21Z', 'cntrb_full_name': 'Marcel Hild'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Updating now resolved email krabs@tilde.team +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000278-6000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:17 blueberry 
insert_facade_contributors[59440] INFO Creating alias for email: mhild@redhat.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 002b7f992e9afbd21a3a5f481ef79fffa992a0f7 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/002b7f992e9afbd21a3a5f481ef79fffa992a0f7 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Updating now resolved email mhild@redhat.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0e1eaabd4395b210fcf9531a8b8425cf5de2ba6d +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/0e1eaabd4395b210fcf9531a8b8425cf5de2ba6d +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=acs@bitergia.com+in:email+type:user +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from acs@bitergia.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'acs@bitergia.com', 'name': 'Alvaro del Castillo'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 7453394+tumido@users.noreply.github.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010071ba-d200-0000-0000-000000000000'), 'cntrb_login': 'tumido', 'cntrb_created_at': '2014-04-30T20:36:34Z', 'cntrb_email': None, 'cntrb_company': 'Red Hat', 'cntrb_location': 'Czech Republic', 'cntrb_canonical': '7453394+tumido@users.noreply.github.com', 'gh_user_id': 7453394, 'gh_login': 'tumido', 'gh_url': 'https://api.github.com/users/tumido', 'gh_html_url': 'https://github.com/tumido', 'gh_node_id': 'MDQ6VXNlcjc0NTMzOTQ=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/7453394?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/tumido/followers', 'gh_following_url': 'https://api.github.com/users/tumido/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/tumido/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/tumido/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/tumido/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/tumido/orgs', 'gh_repos_url': 'https://api.github.com/users/tumido/repos', 'gh_events_url': 'https://api.github.com/users/tumido/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/tumido/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-23T12:21:31Z', 'cntrb_full_name': 'Tom Coufal'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO cntrb_id 010071ba-d200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 7453394+tumido@users.noreply.github.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Updating now resolved 
email 7453394+tumido@users.noreply.github.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1ead8b4320e1fd8d5c7d89203748de5ccc6a71c3 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/1ead8b4320e1fd8d5c7d89203748de5ccc6a71c3 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'isaacmilarky', 'hash': '002b7f992e9afbd21a3a5f481ef79fffa992a0f7', 'email_raw': 'imilarksy@gmail.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'avomakesart', 'id': 40504240, 'node_id': 'MDQ6VXNlcjQwNTA0MjQw', 'avatar_url': 'https://avatars.githubusercontent.com/u/40504240?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/avomakesart', 'html_url': 'https://github.com/avomakesart', 'followers_url': 'https://api.github.com/users/avomakesart/followers', 'following_url': 'https://api.github.com/users/avomakesart/following{/other_user}', 'gists_url': 'https://api.github.com/users/avomakesart/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/avomakesart/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/avomakesart/subscriptions', 'organizations_url': 'https://api.github.com/users/avomakesart/orgs', 'repos_url': 'https://api.github.com/users/avomakesart/repos', 'events_url': 'https://api.github.com/users/avomakesart/events{/privacy}', 'received_events_url': 'https://api.github.com/users/avomakesart/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: mafesan@bitergia.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010089d4-5600-0000-0000-000000000000'), 'cntrb_login': 'mafesan', 'cntrb_created_at': '2014-10-06T07:31:13Z', 'cntrb_email': None, 'cntrb_company': 'Bitergia', 'cntrb_location': 'Madrid, Spain', 'cntrb_canonical': 'mafesan@bitergia.com', 'gh_user_id': 9032790, 'gh_login': 'mafesan', 'gh_url': 'https://api.github.com/users/mafesan', 'gh_html_url': 'https://github.com/mafesan', 'gh_node_id': 'MDQ6VXNlcjkwMzI3OTA=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/9032790?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/mafesan/followers', 'gh_following_url': 'https://api.github.com/users/mafesan/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/mafesan/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/mafesan/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/mafesan/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/mafesan/orgs', 'gh_repos_url': 'https://api.github.com/users/mafesan/repos', 'gh_events_url': 'https://api.github.com/users/mafesan/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/mafesan/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-04T12:19:52Z', 'cntrb_full_name': 'Miguel Ángel Fernández'} +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO cntrb_id 010089d4-5600-0000-0000-000000000000 found in database and 
assigned to enriched data +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Creating alias for email: mafesan@bitergia.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Updating now resolved email mafesan@bitergia.com +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1077a9fc2eac46c92391202da01d8cdf8460af36 +2023-01-12 12:24:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/1077a9fc2eac46c92391202da01d8cdf8460af36 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 61122099+ipolonsk@users.noreply.github.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0103a4a6-3300-0000-0000-000000000000'), 'cntrb_login': 'ipolonsk', 'cntrb_created_at': '2020-02-16T19:04:02Z', 'cntrb_email': None, 'cntrb_company': 'Red Hat', 'cntrb_location': None, 'cntrb_canonical': '61122099+ipolonsk@users.noreply.github.com', 'gh_user_id': 61122099, 'gh_login': 'ipolonsk', 'gh_url': 'https://api.github.com/users/ipolonsk', 'gh_html_url': 'https://github.com/ipolonsk', 'gh_node_id': 'MDQ6VXNlcjYxMTIyMDk5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/61122099?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ipolonsk/followers', 'gh_following_url': 'https://api.github.com/users/ipolonsk/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ipolonsk/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ipolonsk/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ipolonsk/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ipolonsk/orgs', 'gh_repos_url': 'https://api.github.com/users/ipolonsk/repos', 'gh_events_url': 'https://api.github.com/users/ipolonsk/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ipolonsk/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-10-10T07:37:50Z', 'cntrb_full_name': 'Ilana Polonsky'} +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=imilarksy@gmail.com+in:email+type:user +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from imilarksy@gmail.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'imilarksy@gmail.com', 'name': 'isaacmilarky'} +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO cntrb_id 0103a4a6-3300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 61122099+ipolonsk@users.noreply.github.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! 
+2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0032a5ad92449df87e060391a502f7c65559c249 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 61122099+ipolonsk@users.noreply.github.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0032a5ad92449df87e060391a502f7c65559c249 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: acs@bitergia.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01026a0b-b000-0000-0000-000000000000'), 'cntrb_login': 'avomakesart', 'cntrb_created_at': '2018-06-22T21:30:22Z', 'cntrb_email': 'alvaro.castillo777@gmail.com', 'cntrb_company': '@bol.com', 'cntrb_location': 'Utrecht, Netherlands', 'cntrb_canonical': 'alvaro.castillo777@gmail.com', 'gh_user_id': 40504240, 'gh_login': 'avomakesart', 'gh_url': 'https://api.github.com/users/avomakesart', 'gh_html_url': 'https://github.com/avomakesart', 'gh_node_id': 'MDQ6VXNlcjQwNTA0MjQw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/40504240?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/avomakesart/followers', 'gh_following_url': 'https://api.github.com/users/avomakesart/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/avomakesart/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/avomakesart/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/avomakesart/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/avomakesart/orgs', 'gh_repos_url': 'https://api.github.com/users/avomakesart/repos', 'gh_events_url': 'https://api.github.com/users/avomakesart/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/avomakesart/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T13:33:21Z', 'cntrb_full_name': 'Alvaro Castillo '} +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO cntrb_id 01026a0b-b000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Creating alias for email: acs@bitergia.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0ee0e66cb184f0468cc3c807faab291892ccde47 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/0ee0e66cb184f0468cc3c807faab291892ccde47 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Updating now resolved email acs@bitergia.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0ecfeb26dc9661506f21f3309bbc17d6f1190d32 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/0ecfeb26dc9661506f21f3309bbc17d6f1190d32 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: mcliffor@redhat.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01004496-3a00-0000-0000-000000000000'), 'cntrb_login': 'MichaelClifford', 
'cntrb_created_at': '2013-05-22T02:10:44Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'United States', 'cntrb_canonical': 'mcliffor@redhat.com', 'gh_user_id': 4494906, 'gh_login': 'MichaelClifford', 'gh_url': 'https://api.github.com/users/MichaelClifford', 'gh_html_url': 'https://github.com/MichaelClifford', 'gh_node_id': 'MDQ6VXNlcjQ0OTQ5MDY=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/4494906?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/MichaelClifford/followers', 'gh_following_url': 'https://api.github.com/users/MichaelClifford/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/MichaelClifford/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/MichaelClifford/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/MichaelClifford/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/MichaelClifford/orgs', 'gh_repos_url': 'https://api.github.com/users/MichaelClifford/repos', 'gh_events_url': 'https://api.github.com/users/MichaelClifford/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/MichaelClifford/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T15:23:45Z', 'cntrb_full_name': 'Michael Clifford'} +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO cntrb_id 01004496-3a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Creating alias for email: mcliffor@redhat.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Updating now resolved email mcliffor@redhat.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1fd796368ab451d7f58395535dc4b04b13a8073d +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/1fd796368ab451d7f58395535dc4b04b13a8073d +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: venu@bitergia.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01018185-2b00-0000-0000-000000000000'), 'cntrb_login': 'vchrombie', 'cntrb_created_at': '2017-01-21T12:45:08Z', 'cntrb_email': 'vt2182@nyu.edu', 'cntrb_company': '@chaoss ex- @bitergia @amfoss', 'cntrb_location': 'Brooklyn, NY', 'cntrb_canonical': 'vt2182@nyu.edu', 'gh_user_id': 25265451, 'gh_login': 'vchrombie', 'gh_url': 'https://api.github.com/users/vchrombie', 'gh_html_url': 'https://github.com/vchrombie', 'gh_node_id': 'MDQ6VXNlcjI1MjY1NDUx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/25265451?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/vchrombie/followers', 'gh_following_url': 'https://api.github.com/users/vchrombie/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/vchrombie/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/vchrombie/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/vchrombie/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/vchrombie/orgs', 'gh_repos_url': 'https://api.github.com/users/vchrombie/repos', 'gh_events_url': 'https://api.github.com/users/vchrombie/events{/privacy}', 
'gh_received_events_url': 'https://api.github.com/users/vchrombie/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-08T18:55:42Z', 'cntrb_full_name': 'Venu Vardhan Reddy Tekula'} +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'J. Manrique Lopez de la Fuente', 'hash': '0ecfeb26dc9661506f21f3309bbc17d6f1190d32', 'email_raw': 'jsmanrique@bitergia.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO cntrb_id 01018185-2b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Creating alias for email: venu@bitergia.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Updating now resolved email venu@bitergia.com +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 16d309a5b2d2c2c582615f77ecf23a1c4f819525 +2023-01-12 12:24:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/16d309a5b2d2c2c582615f77ecf23a1c4f819525 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: saicharan.reddy1@gmail.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101807a-5b00-0000-0000-000000000000'), 'cntrb_login': 'mrsaicharan1', 'cntrb_created_at': '2017-01-18T09:49:30Z', 'cntrb_email': 'saicharan.reddy1@gmail.com', 'cntrb_company': None, 'cntrb_location': 'Plano, Texas', 'cntrb_canonical': 'saicharan.reddy1@gmail.com', 'gh_user_id': 25197147, 'gh_login': 'mrsaicharan1', 'gh_url': 'https://api.github.com/users/mrsaicharan1', 'gh_html_url': 'https://github.com/mrsaicharan1', 'gh_node_id': 'MDQ6VXNlcjI1MTk3MTQ3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/25197147?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/mrsaicharan1/followers', 'gh_following_url': 'https://api.github.com/users/mrsaicharan1/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/mrsaicharan1/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/mrsaicharan1/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/mrsaicharan1/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/mrsaicharan1/orgs', 'gh_repos_url': 'https://api.github.com/users/mrsaicharan1/repos', 'gh_events_url': 'https://api.github.com/users/mrsaicharan1/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/mrsaicharan1/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-04-06T17:41:35Z', 'cntrb_full_name': 'Saicharan Reddy'} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: mpovolny@redhat.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010000c7-9700-0000-0000-000000000000'), 'cntrb_login': 'martinpovolny', 'cntrb_created_at': '2009-02-02T12:17:11Z', 'cntrb_email': 'mpovolny@redhat.com', 'cntrb_company': 'Red Hat', 'cntrb_location': 'Brno, Czech Republic', 'cntrb_canonical': 'mpovolny@redhat.com', 'gh_user_id': 
51095, 'gh_login': 'martinpovolny', 'gh_url': 'https://api.github.com/users/martinpovolny', 'gh_html_url': 'https://github.com/martinpovolny', 'gh_node_id': 'MDQ6VXNlcjUxMDk1', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/51095?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/martinpovolny/followers', 'gh_following_url': 'https://api.github.com/users/martinpovolny/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/martinpovolny/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/martinpovolny/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/martinpovolny/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/martinpovolny/orgs', 'gh_repos_url': 'https://api.github.com/users/martinpovolny/repos', 'gh_events_url': 'https://api.github.com/users/martinpovolny/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/martinpovolny/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-08T05:40:49Z', 'cntrb_full_name': 'Martin Povolny'} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=jsmanrique@bitergia.com+in:email+type:user +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from jsmanrique@bitergia.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'jsmanrique@bitergia.com', 'name': 'J. Manrique Lopez de la Fuente'} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101807a-5b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: saicharan.reddy1@gmail.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email saicharan.reddy1@gmail.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... 
+2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 010000c7-9700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: mpovolny@redhat.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email mpovolny@redhat.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 14128dfc115f338708999035ab7e8ac0278f8533 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/14128dfc115f338708999035ab7e8ac0278f8533 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: hema.veeradhi@gmail.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100700b-fb00-0000-0000-000000000000'), 'cntrb_login': 'hemajv', 'cntrb_created_at': '2014-04-19T01:15:06Z', 'cntrb_email': 'hveeradh@redhat.com', 'cntrb_company': '@AICoE, @operate-first at Red Hat', 'cntrb_location': 'Boston', 'cntrb_canonical': 'hveeradh@redhat.com', 'gh_user_id': 7343099, 'gh_login': 'hemajv', 'gh_url': 'https://api.github.com/users/hemajv', 'gh_html_url': 'https://github.com/hemajv', 'gh_node_id': 'MDQ6VXNlcjczNDMwOTk=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/7343099?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/hemajv/followers', 'gh_following_url': 'https://api.github.com/users/hemajv/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/hemajv/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/hemajv/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/hemajv/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/hemajv/orgs', 'gh_repos_url': 'https://api.github.com/users/hemajv/repos', 'gh_events_url': 'https://api.github.com/users/hemajv/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/hemajv/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-04T12:45:07Z', 'cntrb_full_name': 'Hema Veeradhi'} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100700b-fb00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: hema.veeradhi@gmail.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email hema.veeradhi@gmail.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 29211658b82ebb628adddeb9c10bc26fac7f6ae4 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/29211658b82ebb628adddeb9c10bc26fac7f6ae4 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'jorgejavierfm', 'id': 13924249, 'node_id': 'MDQ6VXNlcjEzOTI0MjQ5', 'avatar_url': 'https://avatars.githubusercontent.com/u/13924249?v=4', 'gravatar_id': '', 'url': 
'https://api.github.com/users/jorgejavierfm', 'html_url': 'https://github.com/jorgejavierfm', 'followers_url': 'https://api.github.com/users/jorgejavierfm/followers', 'following_url': 'https://api.github.com/users/jorgejavierfm/following{/other_user}', 'gists_url': 'https://api.github.com/users/jorgejavierfm/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/jorgejavierfm/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/jorgejavierfm/subscriptions', 'organizations_url': 'https://api.github.com/users/jorgejavierfm/orgs', 'repos_url': 'https://api.github.com/users/jorgejavierfm/repos', 'events_url': 'https://api.github.com/users/jorgejavierfm/events{/privacy}', 'received_events_url': 'https://api.github.com/users/jorgejavierfm/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: outdoors@acm.org +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010005cb-c700-0000-0000-000000000000'), 'cntrb_login': 'sgoggins', 'cntrb_created_at': '2010-08-29T16:25:48Z', 'cntrb_email': 's@goggins.com', 'cntrb_company': 'University of Missouri & Linux Foundation CHAOSS Working Group', 'cntrb_location': 'Columbia, MO', 'cntrb_canonical': 's@goggins.com', 'gh_user_id': 379847, 'gh_login': 'sgoggins', 'gh_url': 'https://api.github.com/users/sgoggins', 'gh_html_url': 'https://github.com/sgoggins', 'gh_node_id': 'MDQ6VXNlcjM3OTg0Nw==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/379847?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sgoggins/followers', 'gh_following_url': 'https://api.github.com/users/sgoggins/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sgoggins/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sgoggins/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sgoggins/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sgoggins/orgs', 'gh_repos_url': 'https://api.github.com/users/sgoggins/repos', 'gh_events_url': 'https://api.github.com/users/sgoggins/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sgoggins/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T18:26:13Z', 'cntrb_full_name': 'Sean P. 
Goggins'} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: jjmerchante@bitergia.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100a072-4400-0000-0000-000000000000'), 'cntrb_login': 'jjmerchante', 'cntrb_created_at': '2015-01-13T12:16:17Z', 'cntrb_email': None, 'cntrb_company': 'URJC', 'cntrb_location': None, 'cntrb_canonical': 'jjmerchante@bitergia.com', 'gh_user_id': 10515012, 'gh_login': 'jjmerchante', 'gh_url': 'https://api.github.com/users/jjmerchante', 'gh_html_url': 'https://github.com/jjmerchante', 'gh_node_id': 'MDQ6VXNlcjEwNTE1MDEy', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/10515012?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jjmerchante/followers', 'gh_following_url': 'https://api.github.com/users/jjmerchante/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jjmerchante/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jjmerchante/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jjmerchante/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jjmerchante/orgs', 'gh_repos_url': 'https://api.github.com/users/jjmerchante/repos', 'gh_events_url': 'https://api.github.com/users/jjmerchante/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jjmerchante/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-08-08T07:17:00Z', 'cntrb_full_name': 'Jose Javier Merchante'} +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 010005cb-c700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: outdoors@acm.org +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email outdoors@acm.org +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100a072-4400-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jjmerchante@bitergia.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jjmerchante@bitergia.com +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 19459aed6581de7e984ee325e01fc9885b25d68b +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/19459aed6581de7e984ee325e01fc9885b25d68b +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 00503961a30fc380ab27c612a3639d6f43ac8489 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/00503961a30fc380ab27c612a3639d6f43ac8489 +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:24:19 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Anand Sanmukhani', 'hash': '14128dfc115f338708999035ab7e8ac0278f8533', 'email_raw': 'asanmukh@redhat.com', 
'resolution_status': 'not_unresolved'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: jsmanrique@bitergia.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100d477-9900-0000-0000-000000000000'), 'cntrb_login': 'jorgejavierfm', 'cntrb_created_at': '2015-08-23T01:17:00Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Miami', 'cntrb_canonical': 'jsmanrique@bitergia.com', 'gh_user_id': 13924249, 'gh_login': 'jorgejavierfm', 'gh_url': 'https://api.github.com/users/jorgejavierfm', 'gh_html_url': 'https://github.com/jorgejavierfm', 'gh_node_id': 'MDQ6VXNlcjEzOTI0MjQ5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/13924249?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jorgejavierfm/followers', 'gh_following_url': 'https://api.github.com/users/jorgejavierfm/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jorgejavierfm/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jorgejavierfm/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jorgejavierfm/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jorgejavierfm/orgs', 'gh_repos_url': 'https://api.github.com/users/jorgejavierfm/repos', 'gh_events_url': 'https://api.github.com/users/jorgejavierfm/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jorgejavierfm/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-03-22T16:26:17Z', 'cntrb_full_name': 'Jorge J. de la Fuente'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100d477-9900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jsmanrique@bitergia.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jsmanrique@bitergia.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 3eba26a950d6548c2618c3034f231de1bc155810 +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/3eba26a950d6548c2618c3034f231de1bc155810 +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=asanmukh@redhat.com+in:email+type:user +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from asanmukh@redhat.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'asanmukh@redhat.com', 'name': 'Anand Sanmukhani'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... 
+2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 41898282+github-actions[bot]@users.noreply.github.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01027f51-2a00-0000-0000-000000000000'), 'cntrb_login': 'github-actions[bot]', 'cntrb_created_at': '2018-07-30T09:30:16Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '41898282+github-actions[bot]@users.noreply.github.com', 'gh_user_id': 41898282, 'gh_login': 'github-actions[bot]', 'gh_url': 'https://api.github.com/users/github-actions%5Bbot%5D', 'gh_html_url': 'https://github.com/apps/github-actions', 'gh_node_id': 'MDM6Qm90NDE4OTgyODI=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/in/15368?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/github-actions%5Bbot%5D/followers', 'gh_following_url': 'https://api.github.com/users/github-actions%5Bbot%5D/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/github-actions%5Bbot%5D/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/github-actions%5Bbot%5D/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/github-actions%5Bbot%5D/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/github-actions%5Bbot%5D/orgs', 'gh_repos_url': 'https://api.github.com/users/github-actions%5Bbot%5D/repos', 'gh_events_url': 'https://api.github.com/users/github-actions%5Bbot%5D/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/github-actions%5Bbot%5D/received_events', 'gh_type': 'Bot', 'gh_site_admin': False, 'cntrb_last_used': '2018-09-18T23:02:41Z', 'cntrb_full_name': 'github-actions[bot]'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 01027f51-2a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 41898282+github-actions[bot]@users.noreply.github.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 41898282+github-actions[bot]@users.noreply.github.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': '4n4nd', 'id': 22333506, 'node_id': 'MDQ6VXNlcjIyMzMzNTA2', 'avatar_url': 'https://avatars.githubusercontent.com/u/22333506?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/4n4nd', 'html_url': 'https://github.com/4n4nd', 'followers_url': 'https://api.github.com/users/4n4nd/followers', 'following_url': 'https://api.github.com/users/4n4nd/following{/other_user}', 'gists_url': 'https://api.github.com/users/4n4nd/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/4n4nd/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/4n4nd/subscriptions', 'organizations_url': 'https://api.github.com/users/4n4nd/orgs', 'repos_url': 'https://api.github.com/users/4n4nd/repos', 'events_url': 'https://api.github.com/users/4n4nd/events{/privacy}', 'received_events_url': 'https://api.github.com/users/4n4nd/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: quan@bitergia.com +2023-01-12 12:24:20 
blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01005604-7100-0000-0000-000000000000'), 'cntrb_login': 'zhquan', 'cntrb_created_at': '2013-10-08T10:39:58Z', 'cntrb_email': 'quan@bitergia.com', 'cntrb_company': 'Bitergia', 'cntrb_location': 'Madrid', 'cntrb_canonical': 'quan@bitergia.com', 'gh_user_id': 5637233, 'gh_login': 'zhquan', 'gh_url': 'https://api.github.com/users/zhquan', 'gh_html_url': 'https://github.com/zhquan', 'gh_node_id': 'MDQ6VXNlcjU2MzcyMzM=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/5637233?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/zhquan/followers', 'gh_following_url': 'https://api.github.com/users/zhquan/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/zhquan/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/zhquan/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/zhquan/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/zhquan/orgs', 'gh_repos_url': 'https://api.github.com/users/zhquan/repos', 'gh_events_url': 'https://api.github.com/users/zhquan/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/zhquan/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-02T09:23:36Z', 'cntrb_full_name': 'Quan Zhou'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 01005604-7100-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: quan@bitergia.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Updating now resolved email quan@bitergia.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 265ba9a4f3026092255decdce765f02fae4d3409 +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/265ba9a4f3026092255decdce765f02fae4d3409 +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: oc@bu.edu +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101eeec-0600-0000-0000-000000000000'), 'cntrb_login': 'oindrillac', 'cntrb_created_at': '2017-10-01T15:06:47Z', 'cntrb_email': 'ochatter@redhat.com', 'cntrb_company': '@AICoE and @operate-first at Red Hat', 'cntrb_location': 'Boston ', 'cntrb_canonical': 'ochatter@redhat.com', 'gh_user_id': 32435206, 'gh_login': 'oindrillac', 'gh_url': 'https://api.github.com/users/oindrillac', 'gh_html_url': 'https://github.com/oindrillac', 'gh_node_id': 'MDQ6VXNlcjMyNDM1MjA2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/32435206?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/oindrillac/followers', 'gh_following_url': 'https://api.github.com/users/oindrillac/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/oindrillac/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/oindrillac/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/oindrillac/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/oindrillac/orgs', 'gh_repos_url': 'https://api.github.com/users/oindrillac/repos', 'gh_events_url': 
'https://api.github.com/users/oindrillac/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/oindrillac/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T07:31:27Z', 'cntrb_full_name': 'Oindrilla Chatterjee'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101eeec-0600-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: oc@bu.edu +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Updating now resolved email oc@bu.edu +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: ccarterlandis@gmail.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101e100-7300-0000-0000-000000000000'), 'cntrb_login': 'ccarterlandis', 'cntrb_created_at': '2017-08-31T22:01:37Z', 'cntrb_email': 'c@carterlandis.com', 'cntrb_company': '@Gusto', 'cntrb_location': None, 'cntrb_canonical': 'c@carterlandis.com', 'gh_user_id': 31522931, 'gh_login': 'ccarterlandis', 'gh_url': 'https://api.github.com/users/ccarterlandis', 'gh_html_url': 'https://github.com/ccarterlandis', 'gh_node_id': 'MDQ6VXNlcjMxNTIyOTMx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/31522931?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ccarterlandis/followers', 'gh_following_url': 'https://api.github.com/users/ccarterlandis/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ccarterlandis/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ccarterlandis/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ccarterlandis/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ccarterlandis/orgs', 'gh_repos_url': 'https://api.github.com/users/ccarterlandis/repos', 'gh_events_url': 'https://api.github.com/users/ccarterlandis/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ccarterlandis/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-05T17:00:36Z', 'cntrb_full_name': 'Carter Landis'} +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 5ac80d639f93df06373f6645c0e5e3c85be0fdde +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/5ac80d639f93df06373f6645c0e5e3c85be0fdde +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101e100-7300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: ccarterlandis@gmail.com +2023-01-12 12:24:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email ccarterlandis@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: asanmukh@redhat.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010154c8-4200-0000-0000-000000000000'), 'cntrb_login': '4n4nd', 'cntrb_created_at': '2016-09-20T23:56:14Z', 'cntrb_email': None, 
'cntrb_company': 'Microsoft', 'cntrb_location': None, 'cntrb_canonical': 'asanmukh@redhat.com', 'gh_user_id': 22333506, 'gh_login': '4n4nd', 'gh_url': 'https://api.github.com/users/4n4nd', 'gh_html_url': 'https://github.com/4n4nd', 'gh_node_id': 'MDQ6VXNlcjIyMzMzNTA2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/22333506?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/4n4nd/followers', 'gh_following_url': 'https://api.github.com/users/4n4nd/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/4n4nd/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/4n4nd/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/4n4nd/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/4n4nd/orgs', 'gh_repos_url': 'https://api.github.com/users/4n4nd/repos', 'gh_events_url': 'https://api.github.com/users/4n4nd/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/4n4nd/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-11T10:24:35Z', 'cntrb_full_name': 'Anand Sanmukhani'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: jj.merchante@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100a072-4400-0000-0000-000000000000'), 'cntrb_login': 'jjmerchante', 'cntrb_created_at': '2015-01-13T12:16:17Z', 'cntrb_email': None, 'cntrb_company': 'URJC', 'cntrb_location': None, 'cntrb_canonical': 'jj.merchante@gmail.com', 'gh_user_id': 10515012, 'gh_login': 'jjmerchante', 'gh_url': 'https://api.github.com/users/jjmerchante', 'gh_html_url': 'https://github.com/jjmerchante', 'gh_node_id': 'MDQ6VXNlcjEwNTE1MDEy', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/10515012?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jjmerchante/followers', 'gh_following_url': 'https://api.github.com/users/jjmerchante/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jjmerchante/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jjmerchante/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jjmerchante/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jjmerchante/orgs', 'gh_repos_url': 'https://api.github.com/users/jjmerchante/repos', 'gh_events_url': 'https://api.github.com/users/jjmerchante/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jjmerchante/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-08-08T07:17:00Z', 'cntrb_full_name': 'Jose Javier Merchante'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0055d5e5eb2f3b14b16482bb44fd831742b2fbb1 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100a072-4400-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jj.merchante@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0055d5e5eb2f3b14b16482bb44fd831742b2fbb1 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 010154c8-4200-0000-0000-000000000000 found in database and 
assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: asanmukh@redhat.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jj.merchante@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email asanmukh@redhat.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 54897b33c0e78e346d28b00556d7f3a55a9bcf19 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/54897b33c0e78e346d28b00556d7f3a55a9bcf19 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: humair88@hotmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100a665-8700-0000-0000-000000000000'), 'cntrb_login': 'HumairAK', 'cntrb_created_at': '2015-02-08T05:31:38Z', 'cntrb_email': None, 'cntrb_company': 'Red Hat', 'cntrb_location': None, 'cntrb_canonical': 'humair88@hotmail.com', 'gh_user_id': 10904967, 'gh_login': 'HumairAK', 'gh_url': 'https://api.github.com/users/HumairAK', 'gh_html_url': 'https://github.com/HumairAK', 'gh_node_id': 'MDQ6VXNlcjEwOTA0OTY3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/10904967?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/HumairAK/followers', 'gh_following_url': 'https://api.github.com/users/HumairAK/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/HumairAK/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/HumairAK/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/HumairAK/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/HumairAK/orgs', 'gh_repos_url': 'https://api.github.com/users/HumairAK/repos', 'gh_events_url': 'https://api.github.com/users/HumairAK/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/HumairAK/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T17:04:42Z', 'cntrb_full_name': 'Humair Khan'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100a665-8700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: humair88@hotmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email humair88@hotmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1ccda51aa537751014059d1b45cff687a15fad65 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/1ccda51aa537751014059d1b45cff687a15fad65 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: jgb@gsyc.es +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000fdd-4d00-0000-0000-000000000000'), 'cntrb_login': 'jgbarah', 'cntrb_created_at': '2011-09-09T21:47:40Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 
'cntrb_canonical': 'jgb@gsyc.es', 'gh_user_id': 1039693, 'gh_login': 'jgbarah', 'gh_url': 'https://api.github.com/users/jgbarah', 'gh_html_url': 'https://github.com/jgbarah', 'gh_node_id': 'MDQ6VXNlcjEwMzk2OTM=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/1039693?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jgbarah/followers', 'gh_following_url': 'https://api.github.com/users/jgbarah/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jgbarah/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jgbarah/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jgbarah/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jgbarah/orgs', 'gh_repos_url': 'https://api.github.com/users/jgbarah/repos', 'gh_events_url': 'https://api.github.com/users/jgbarah/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jgbarah/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-15T15:39:10Z', 'cntrb_full_name': 'Jesus M. Gonzalez-Barahona'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000fdd-4d00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jgb@gsyc.es +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jgb@gsyc.es +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 6a569a0926382bd14af2bf597a40a54e2f09ecf9 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-perceval-opnfv/commits/6a569a0926382bd14af2bf597a40a54e2f09ecf9 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 71036291+margarethaley@users.noreply.github.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01043bed-8300-0000-0000-000000000000'), 'cntrb_login': 'margarethaley', 'cntrb_created_at': '2020-09-09T18:12:26Z', 'cntrb_email': None, 'cntrb_company': 'College of the Holy Cross', 'cntrb_location': None, 'cntrb_canonical': '71036291+margarethaley@users.noreply.github.com', 'gh_user_id': 71036291, 'gh_login': 'margarethaley', 'gh_url': 'https://api.github.com/users/margarethaley', 'gh_html_url': 'https://github.com/margarethaley', 'gh_node_id': 'MDQ6VXNlcjcxMDM2Mjkx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/71036291?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/margarethaley/followers', 'gh_following_url': 'https://api.github.com/users/margarethaley/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/margarethaley/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/margarethaley/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/margarethaley/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/margarethaley/orgs', 'gh_repos_url': 'https://api.github.com/users/margarethaley/repos', 'gh_events_url': 'https://api.github.com/users/margarethaley/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/margarethaley/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': 
'2022-04-15T13:09:51Z', 'cntrb_full_name': 'Margaret Haley'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 01043bed-8300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 71036291+margarethaley@users.noreply.github.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 71036291+margarethaley@users.noreply.github.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 62295cfc17a4b6a6a71810bce1ad09e8dc8ce623 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/62295cfc17a4b6a6a71810bce1ad09e8dc8ce623 +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: gabe.heim@yahoo.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01013cd8-ae00-0000-0000-000000000000'), 'cntrb_login': 'gabe-heim', 'cntrb_created_at': '2016-08-01T04:50:19Z', 'cntrb_email': 'gabe.heim@yahoo.com', 'cntrb_company': None, 'cntrb_location': 'Austin, TX', 'cntrb_canonical': 'gabe.heim@yahoo.com', 'gh_user_id': 20764846, 'gh_login': 'gabe-heim', 'gh_url': 'https://api.github.com/users/gabe-heim', 'gh_html_url': 'https://github.com/gabe-heim', 'gh_node_id': 'MDQ6VXNlcjIwNzY0ODQ2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/20764846?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/gabe-heim/followers', 'gh_following_url': 'https://api.github.com/users/gabe-heim/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/gabe-heim/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/gabe-heim/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/gabe-heim/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/gabe-heim/orgs', 'gh_repos_url': 'https://api.github.com/users/gabe-heim/repos', 'gh_events_url': 'https://api.github.com/users/gabe-heim/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/gabe-heim/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-25T16:12:06Z', 'cntrb_full_name': 'Gabe Heim'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: jsmanrique@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010011fa-c100-0000-0000-000000000000'), 'cntrb_login': 'jsmanrique', 'cntrb_created_at': '2011-11-07T16:00:27Z', 'cntrb_email': 'jsmanrique@gmail.com', 'cntrb_company': '@Inditex', 'cntrb_location': 'Northern Spain', 'cntrb_canonical': 'jsmanrique@gmail.com', 'gh_user_id': 1178305, 'gh_login': 'jsmanrique', 'gh_url': 'https://api.github.com/users/jsmanrique', 'gh_html_url': 'https://github.com/jsmanrique', 'gh_node_id': 'MDQ6VXNlcjExNzgzMDU=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/1178305?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jsmanrique/followers', 'gh_following_url': 'https://api.github.com/users/jsmanrique/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jsmanrique/gists{/gist_id}', 'gh_starred_url': 
'https://api.github.com/users/jsmanrique/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jsmanrique/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jsmanrique/orgs', 'gh_repos_url': 'https://api.github.com/users/jsmanrique/repos', 'gh_events_url': 'https://api.github.com/users/jsmanrique/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jsmanrique/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T16:39:08Z', 'cntrb_full_name': 'Manrique Lopez'} +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 010011fa-c100-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jsmanrique@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 01013cd8-ae00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: gabe.heim@yahoo.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email gabe.heim@yahoo.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jsmanrique@gmail.com +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 005820761d12aa0222b4cc42945f013f6843681b +2023-01-12 12:24:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/005820761d12aa0222b4cc42945f013f6843681b +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: dc451ac55b89beb7398afa4f39f9b1a3a68945b5 +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/grimoirelab-hatstall/commits/dc451ac55b89beb7398afa4f39f9b1a3a68945b5 +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: tcoufal@redhat.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010071ba-d200-0000-0000-000000000000'), 'cntrb_login': 'tumido', 'cntrb_created_at': '2014-04-30T20:36:34Z', 'cntrb_email': None, 'cntrb_company': 'Red Hat', 'cntrb_location': 'Czech Republic', 'cntrb_canonical': 'tcoufal@redhat.com', 'gh_user_id': 7453394, 'gh_login': 'tumido', 'gh_url': 'https://api.github.com/users/tumido', 'gh_html_url': 'https://github.com/tumido', 'gh_node_id': 'MDQ6VXNlcjc0NTMzOTQ=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/7453394?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/tumido/followers', 'gh_following_url': 'https://api.github.com/users/tumido/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/tumido/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/tumido/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/tumido/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/tumido/orgs', 'gh_repos_url': 'https://api.github.com/users/tumido/repos', 'gh_events_url': 'https://api.github.com/users/tumido/events{/privacy}', 'gh_received_events_url': 
'https://api.github.com/users/tumido/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-23T12:21:31Z', 'cntrb_full_name': 'Tom Coufal'} +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO cntrb_id 010071ba-d200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Creating alias for email: tcoufal@redhat.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Updating now resolved email tcoufal@redhat.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 20c2536f8a43c01d2cd0e7a4ba35ae868f6d6172 +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/blueprint/commits/20c2536f8a43c01d2cd0e7a4ba35ae868f6d6172 +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 49699333+dependabot[bot]@users.noreply.github.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102f65a-0500-0000-0000-000000000000'), 'cntrb_login': 'dependabot[bot]', 'cntrb_created_at': '2019-04-16T22:34:25Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '49699333+dependabot[bot]@users.noreply.github.com', 'gh_user_id': 49699333, 'gh_login': 'dependabot[bot]', 'gh_url': 'https://api.github.com/users/dependabot%5Bbot%5D', 'gh_html_url': 'https://github.com/apps/dependabot', 'gh_node_id': 'MDM6Qm90NDk2OTkzMzM=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/in/29110?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/dependabot%5Bbot%5D/followers', 'gh_following_url': 'https://api.github.com/users/dependabot%5Bbot%5D/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/dependabot%5Bbot%5D/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/dependabot%5Bbot%5D/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/dependabot%5Bbot%5D/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/dependabot%5Bbot%5D/orgs', 'gh_repos_url': 'https://api.github.com/users/dependabot%5Bbot%5D/repos', 'gh_events_url': 'https://api.github.com/users/dependabot%5Bbot%5D/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/dependabot%5Bbot%5D/received_events', 'gh_type': 'Bot', 'gh_site_admin': False, 'cntrb_last_used': '2019-05-23T08:22:16Z', 'cntrb_full_name': 'dependabot[bot]'} +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102f65a-0500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 49699333+dependabot[bot]@users.noreply.github.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 49699333+dependabot[bot]@users.noreply.github.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: bburns@redhat.com +2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010054b5-be00-0000-0000-000000000000'), 'cntrb_login': 'billburnseh', 
'cntrb_created_at': '2013-09-26T17:08:42Z', 'cntrb_email': 'bburns@redhat.com', 'cntrb_company': 'Red Hat Inc', 'cntrb_location': 'New Hampshire, US', 'cntrb_canonical': 'bburns@redhat.com', 'gh_user_id': 5551550, 'gh_login': 'billburnseh', 'gh_url': 'https://api.github.com/users/billburnseh', 'gh_html_url': 'https://github.com/billburnseh', 'gh_node_id': 'MDQ6VXNlcjU1NTE1NTA=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/5551550?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/billburnseh/followers', 'gh_following_url': 'https://api.github.com/users/billburnseh/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/billburnseh/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/billburnseh/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/billburnseh/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/billburnseh/orgs', 'gh_repos_url': 'https://api.github.com/users/billburnseh/repos', 'gh_events_url': 'https://api.github.com/users/billburnseh/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/billburnseh/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-12-02T14:17:22Z', 'cntrb_full_name': 'Bill Burns'}
+2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO cntrb_id 010054b5-be00-0000-0000-000000000000 found in database and assigned to enriched data
+2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Creating alias for email: bburns@redhat.com
+2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO [] has type
+2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Updating now resolved email bburns@redhat.com
+2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 65c275187b9a1a8c95d0c5d75fd936db3b5ead84
+2023-01-12 12:24:22 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/operate-first/operate-first-twitter/commits/65c275187b9a1a8c95d0c5d75fd936db3b5ead84
[... the same sequence — "Successfully retrieved data from github for email", the enriched contributor record, "cntrb_id ... found in database and assigned to enriched data", "Creating alias for email", "Updating now resolved email" — repeats for the remaining contributor emails in this batch ...]
+2023-01-12 12:24:23 blueberry insert_facade_contributors[59440] INFO Done with inserting and updating facade contributors
+[2023-01-12 12:24:23,513: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[5b17661b-193a-437e-abe8-db972bef6e26] succeeded in 8.992692085999806s: None
+[2023-01-12 12:24:24,315: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[ca42a9a2-3811-4b42-9771-edf03a1bf89a] succeeded in 10.212525199998709s: None
+[2023-01-12 12:24:24,954: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[63dcd9f8-f3ef-41f5-ad82-6d71247779eb] succeeded in 11.272008066000126s: None
+[2023-01-12 12:24:27,425: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[c34794a2-bb3a-4b56-9947-e27a2322f4cf] succeeded in 13.54519013600111s: None
+2023-01-12 12:24:36 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02347a86bfb7cdaeddb59b26425375e3af92ec15
+2023-01-12 12:24:36 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/02347a86bfb7cdaeddb59b26425375e3af92ec15
+2023-01-12 12:24:36 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash
+2023-01-12 12:24:36 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Abhinav - FOSS', 'hash': '02347a86bfb7cdaeddb59b26425375e3af92ec15', 'email_raw': 'abhinav-foss@abajpai-ltm.internal.salesforce.com', 'resolution_status': 'not_unresolved'}
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=abhinav-foss@abajpai-ltm.internal.salesforce.com+in:email+type:user
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from abhinav-foss@abajpai-ltm.internal.salesforce.com
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'abhinav-foss@abajpai-ltm.internal.salesforce.com', 'name': 'Abhinav - FOSS'}
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search...
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'theawless', 'id': 15650532, 'node_id': 'MDQ6VXNlcjE1NjUwNTMy', 'avatar_url': 'https://avatars.githubusercontent.com/u/15650532?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/theawless', 'html_url': 'https://github.com/theawless', 'followers_url': 'https://api.github.com/users/theawless/followers', 'following_url': 'https://api.github.com/users/theawless/following{/other_user}', 'gists_url': 'https://api.github.com/users/theawless/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/theawless/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/theawless/subscriptions', 'organizations_url': 'https://api.github.com/users/theawless/orgs', 'repos_url': 'https://api.github.com/users/theawless/repos', 'events_url': 'https://api.github.com/users/theawless/events{/privacy}', 'received_events_url': 'https://api.github.com/users/theawless/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0}
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: abhinav-foss@abajpai-ltm.internal.salesforce.com
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100eece-e400-0000-0000-000000000000 found in database and assigned to enriched data
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Creating alias for email: abhinav-foss@abajpai-ltm.internal.salesforce.com
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO [] has type
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Updating now resolved email abhinav-foss@abajpai-ltm.internal.salesforce.com
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 025827b8ffc6bba619d861d865b5f02ffff06dd8
+2023-01-12 12:24:37 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/025827b8ffc6bba619d861d865b5f02ffff06dd8
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: abuhman@users.noreply.github.com
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO cntrb_id 010105ea-2600-0000-0000-000000000000 found in database and assigned to enriched data
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO Creating alias for email: abuhman@users.noreply.github.com
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO [] has type
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO Updating now resolved email abuhman@users.noreply.github.com
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 026cf75e56ac9c156d554b7227c40753843b298f
+2023-01-12 12:24:38 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/026cf75e56ac9c156d554b7227c40753843b298f
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: aksharap.181it132@nitk.edu.in
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO cntrb_id 01018530-b300-0000-0000-000000000000 found in database and assigned to enriched data
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Creating alias for email: aksharap.181it132@nitk.edu.in
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO [] has type
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Updating now resolved email aksharap.181it132@nitk.edu.in
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02a65d5a178356d81d65240ca26052dae4528b89
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/02a65d5a178356d81d65240ca26052dae4528b89
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Ulincsys', 'hash': '02a65d5a178356d81d65240ca26052dae4528b89', 'email_raw': '', 'resolution_status': 'not_unresolved'}
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Email less than two characters
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search...
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason:
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data!
+2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02c58aec7d76ec4e64c753656cb4b894af1cb0cb +2023-01-12 12:24:39 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/02c58aec7d76ec4e64c753656cb4b894af1cb0cb +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: dhruvhsachdev@gmail.com +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010359e6-0a00-0000-0000-000000000000'), 'cntrb_login': 'Dhruv-Sachdev1313', 'cntrb_created_at': '2019-10-06T12:20:20Z', 'cntrb_email': None, 'cntrb_company': 'BeyondIRR', 'cntrb_location': 'Mumbai, India', 'cntrb_canonical': 'dhruvhsachdev@gmail.com', 'gh_user_id': 56223242, 'gh_login': 'Dhruv-Sachdev1313', 'gh_url': 'https://api.github.com/users/Dhruv-Sachdev1313', 'gh_html_url': 'https://github.com/Dhruv-Sachdev1313', 'gh_node_id': 'MDQ6VXNlcjU2MjIzMjQy', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/56223242?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Dhruv-Sachdev1313/followers', 'gh_following_url': 'https://api.github.com/users/Dhruv-Sachdev1313/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Dhruv-Sachdev1313/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Dhruv-Sachdev1313/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Dhruv-Sachdev1313/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Dhruv-Sachdev1313/orgs', 'gh_repos_url': 'https://api.github.com/users/Dhruv-Sachdev1313/repos', 'gh_events_url': 'https://api.github.com/users/Dhruv-Sachdev1313/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Dhruv-Sachdev1313/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T13:08:49Z', 'cntrb_full_name': 'Dhruv Sachdev'} +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO cntrb_id 010359e6-0a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO Creating alias for email: dhruvhsachdev@gmail.com +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO Updating now resolved email dhruvhsachdev@gmail.com +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 02cfe83b7c6da2dc9f438045fd8236d00add03bd +2023-01-12 12:24:40 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/02cfe83b7c6da2dc9f438045fd8236d00add03bd +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: pogayo17@alustudent.com +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010255e5-b200-0000-0000-000000000000'), 'cntrb_login': 'Pogayo', 'cntrb_created_at': '2018-05-11T07:21:32Z', 'cntrb_email': 'perezogayo@gmail.com', 'cntrb_company': 'Carnegie Mellon University', 'cntrb_location': 'Pittsburgh', 'cntrb_canonical': 'perezogayo@gmail.com', 'gh_user_id': 39183794, 'gh_login': 'Pogayo', 'gh_url': 'https://api.github.com/users/Pogayo', 'gh_html_url': 'https://github.com/Pogayo', 'gh_node_id': 'MDQ6VXNlcjM5MTgzNzk0', 'gh_avatar_url': 
'https://avatars.githubusercontent.com/u/39183794?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Pogayo/followers', 'gh_following_url': 'https://api.github.com/users/Pogayo/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Pogayo/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Pogayo/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Pogayo/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Pogayo/orgs', 'gh_repos_url': 'https://api.github.com/users/Pogayo/repos', 'gh_events_url': 'https://api.github.com/users/Pogayo/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Pogayo/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-06T00:00:22Z', 'cntrb_full_name': 'Perez Ogayo'} +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO cntrb_id 010255e5-b200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Creating alias for email: pogayo17@alustudent.com +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Updating now resolved email pogayo17@alustudent.com +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 034985555c64fb2150ca7bf177474bbc49603d76 +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/034985555c64fb2150ca7bf177474bbc49603d76 +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:24:41 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Andrew', 'hash': '034985555c64fb2150ca7bf177474bbc49603d76', 'email_raw': 'andrew@root', 'resolution_status': 'not_unresolved'} +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=andrew@root+in:email+type:user +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from andrew@root +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'andrew@root', 'name': 'Andrew'} +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! 
+2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: ccarterlandis@pm.me +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101e100-7300-0000-0000-000000000000'), 'cntrb_login': 'ccarterlandis', 'cntrb_created_at': '2017-08-31T22:01:37Z', 'cntrb_email': 'c@carterlandis.com', 'cntrb_company': '@Gusto', 'cntrb_location': None, 'cntrb_canonical': 'c@carterlandis.com', 'gh_user_id': 31522931, 'gh_login': 'ccarterlandis', 'gh_url': 'https://api.github.com/users/ccarterlandis', 'gh_html_url': 'https://github.com/ccarterlandis', 'gh_node_id': 'MDQ6VXNlcjMxNTIyOTMx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/31522931?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ccarterlandis/followers', 'gh_following_url': 'https://api.github.com/users/ccarterlandis/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ccarterlandis/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ccarterlandis/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ccarterlandis/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ccarterlandis/orgs', 'gh_repos_url': 'https://api.github.com/users/ccarterlandis/repos', 'gh_events_url': 'https://api.github.com/users/ccarterlandis/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ccarterlandis/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-05T17:00:36Z', 'cntrb_full_name': 'Carter Landis'} +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101e100-7300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Creating alias for email: ccarterlandis@pm.me +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Updating now resolved email ccarterlandis@pm.me +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 04173f6ac4a951d5d07f36b51d102a7def63a5d3 +2023-01-12 12:24:42 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/04173f6ac4a951d5d07f36b51d102a7def63a5d3 +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: germonprez@gmail.com +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000a03-5000-0000-0000-000000000000'), 'cntrb_login': 'germonprez', 'cntrb_created_at': '2011-03-07T19:14:09Z', 'cntrb_email': 'germonprez@gmail.com', 'cntrb_company': 'University of Nebraska at Omaha', 'cntrb_location': 'Omaha, NE', 'cntrb_canonical': 'germonprez@gmail.com', 'gh_user_id': 656208, 'gh_login': 'germonprez', 'gh_url': 'https://api.github.com/users/germonprez', 'gh_html_url': 'https://github.com/germonprez', 'gh_node_id': 'MDQ6VXNlcjY1NjIwOA==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/656208?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/germonprez/followers', 'gh_following_url': 'https://api.github.com/users/germonprez/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/germonprez/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/germonprez/starred{/owner}{/repo}', 'gh_subscriptions_url': 
'https://api.github.com/users/germonprez/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/germonprez/orgs', 'gh_repos_url': 'https://api.github.com/users/germonprez/repos', 'gh_events_url': 'https://api.github.com/users/germonprez/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/germonprez/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T22:38:58Z', 'cntrb_full_name': 'Matt Germonprez'} +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000a03-5000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO Creating alias for email: germonprez@gmail.com +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO Updating now resolved email germonprez@gmail.com +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 042dd7a899c65a3bf502304c339a89074d0bf4cb +2023-01-12 12:24:43 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/042dd7a899c65a3bf502304c339a89074d0bf4cb +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 71280528+oma131@users.noreply.github.com +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01043fa7-9000-0000-0000-000000000000'), 'cntrb_login': 'oma131', 'cntrb_created_at': '2020-09-14T16:22:43Z', 'cntrb_email': 'anosikeihuoma21@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'anosikeihuoma21@gmail.com', 'gh_user_id': 71280528, 'gh_login': 'oma131', 'gh_url': 'https://api.github.com/users/oma131', 'gh_html_url': 'https://github.com/oma131', 'gh_node_id': 'MDQ6VXNlcjcxMjgwNTI4', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/71280528?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/oma131/followers', 'gh_following_url': 'https://api.github.com/users/oma131/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/oma131/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/oma131/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/oma131/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/oma131/orgs', 'gh_repos_url': 'https://api.github.com/users/oma131/repos', 'gh_events_url': 'https://api.github.com/users/oma131/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/oma131/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-01T10:09:03Z', 'cntrb_full_name': 'Oma Anosike'} +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO cntrb_id 01043fa7-9000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 71280528+oma131@users.noreply.github.com +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 71280528+oma131@users.noreply.github.com +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 04443a1b58c6620ae6750f6a6ebb6e8df33cb605 +2023-01-12 12:24:44 blueberry 
insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/04443a1b58c6620ae6750f6a6ebb6e8df33cb605 +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: linkgeorg@gmail.com +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01008739-4500-0000-0000-000000000000'), 'cntrb_login': 'GeorgLink', 'cntrb_created_at': '2014-09-22T13:56:39Z', 'cntrb_email': 'linkgeorg@gmail.com', 'cntrb_company': 'Bitergia', 'cntrb_location': 'Omaha, NE, USA', 'cntrb_canonical': 'linkgeorg@gmail.com', 'gh_user_id': 8862021, 'gh_login': 'GeorgLink', 'gh_url': 'https://api.github.com/users/GeorgLink', 'gh_html_url': 'https://github.com/GeorgLink', 'gh_node_id': 'MDQ6VXNlcjg4NjIwMjE=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8862021?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/GeorgLink/followers', 'gh_following_url': 'https://api.github.com/users/GeorgLink/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/GeorgLink/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/GeorgLink/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/GeorgLink/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/GeorgLink/orgs', 'gh_repos_url': 'https://api.github.com/users/GeorgLink/repos', 'gh_events_url': 'https://api.github.com/users/GeorgLink/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/GeorgLink/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-06T23:36:06Z', 'cntrb_full_name': 'Georg J.P. Link'} +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO cntrb_id 01008739-4500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Creating alias for email: linkgeorg@gmail.com +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Updating now resolved email linkgeorg@gmail.com +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 048de7f4ab40a63fcb5ab725182a734812c58c92 +2023-01-12 12:24:44 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/048de7f4ab40a63fcb5ab725182a734812c58c92 +2023-01-12 12:24:45 blueberry insert_facade_contributors[59440] INFO +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO err: GithubApiResult.SECONDARY_RATE_LIMIT +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: cmpvm5@mail.missouri.edu +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102ce6d-fe00-0000-0000-000000000000'), 'cntrb_login': 'CMPerniciaro', 'cntrb_created_at': '2019-01-27T14:54:31Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'cmpvm5@mail.missouri.edu', 'gh_user_id': 47083006, 'gh_login': 'CMPerniciaro', 'gh_url': 'https://api.github.com/users/CMPerniciaro', 'gh_html_url': 'https://github.com/CMPerniciaro', 'gh_node_id': 'MDQ6VXNlcjQ3MDgzMDA2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/47083006?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/CMPerniciaro/followers', 'gh_following_url': 
'https://api.github.com/users/CMPerniciaro/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/CMPerniciaro/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/CMPerniciaro/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/CMPerniciaro/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/CMPerniciaro/orgs', 'gh_repos_url': 'https://api.github.com/users/CMPerniciaro/repos', 'gh_events_url': 'https://api.github.com/users/CMPerniciaro/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/CMPerniciaro/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T19:15:24Z', 'cntrb_full_name': 'Carolyn Perniciaro'} +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102ce6d-fe00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO Creating alias for email: cmpvm5@mail.missouri.edu +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO Updating now resolved email cmpvm5@mail.missouri.edu +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 05172e65644ee066627a0f9ea2eb6707458dc274 +2023-01-12 12:25:45 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/05172e65644ee066627a0f9ea2eb6707458dc274 +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: gogginss@missouri.edu +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010005cb-c700-0000-0000-000000000000'), 'cntrb_login': 'sgoggins', 'cntrb_created_at': '2010-08-29T16:25:48Z', 'cntrb_email': 's@goggins.com', 'cntrb_company': 'University of Missouri & Linux Foundation CHAOSS Working Group', 'cntrb_location': 'Columbia, MO', 'cntrb_canonical': 's@goggins.com', 'gh_user_id': 379847, 'gh_login': 'sgoggins', 'gh_url': 'https://api.github.com/users/sgoggins', 'gh_html_url': 'https://github.com/sgoggins', 'gh_node_id': 'MDQ6VXNlcjM3OTg0Nw==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/379847?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sgoggins/followers', 'gh_following_url': 'https://api.github.com/users/sgoggins/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sgoggins/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sgoggins/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sgoggins/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sgoggins/orgs', 'gh_repos_url': 'https://api.github.com/users/sgoggins/repos', 'gh_events_url': 'https://api.github.com/users/sgoggins/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sgoggins/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T18:26:13Z', 'cntrb_full_name': 'Sean P. 
Goggins'} +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO cntrb_id 010005cb-c700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO Creating alias for email: gogginss@missouri.edu +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO Updating now resolved email gogginss@missouri.edu +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 05c325ce12ddb57e5d940510d33ac116c5174f3c +2023-01-12 12:25:46 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/05c325ce12ddb57e5d940510d33ac116c5174f3c +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: brennansean6@gmail.com +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000e73-6200-0000-0000-000000000000'), 'cntrb_login': 'sbrennan98', 'cntrb_created_at': '2011-07-29T15:56:07Z', 'cntrb_email': 'sbrennan@mail.missouri.edu', 'cntrb_company': None, 'cntrb_location': 'Austin, TX', 'cntrb_canonical': 'sbrennan@mail.missouri.edu', 'gh_user_id': 947042, 'gh_login': 'sbrennan98', 'gh_url': 'https://api.github.com/users/sbrennan98', 'gh_html_url': 'https://github.com/sbrennan98', 'gh_node_id': 'MDQ6VXNlcjk0NzA0Mg==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/947042?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sbrennan98/followers', 'gh_following_url': 'https://api.github.com/users/sbrennan98/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sbrennan98/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sbrennan98/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sbrennan98/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sbrennan98/orgs', 'gh_repos_url': 'https://api.github.com/users/sbrennan98/repos', 'gh_events_url': 'https://api.github.com/users/sbrennan98/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sbrennan98/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-04T00:06:20Z', 'cntrb_full_name': 'Sean Brennan'} +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000e73-6200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO Creating alias for email: brennansean6@gmail.com +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO Updating now resolved email brennansean6@gmail.com +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 05e612599006c25984626859865547596872c570 +2023-01-12 12:25:47 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/05e612599006c25984626859865547596872c570 +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: misc@redhat.com +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000383-bf00-0000-0000-000000000000'), 'cntrb_login': 'mscherer', 'cntrb_created_at': '2010-03-25T16:35:22Z', 
'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'misc@redhat.com', 'gh_user_id': 230335, 'gh_login': 'mscherer', 'gh_url': 'https://api.github.com/users/mscherer', 'gh_html_url': 'https://github.com/mscherer', 'gh_node_id': 'MDQ6VXNlcjIzMDMzNQ==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/230335?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/mscherer/followers', 'gh_following_url': 'https://api.github.com/users/mscherer/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/mscherer/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/mscherer/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/mscherer/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/mscherer/orgs', 'gh_repos_url': 'https://api.github.com/users/mscherer/repos', 'gh_events_url': 'https://api.github.com/users/mscherer/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/mscherer/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T10:32:01Z', 'cntrb_full_name': 'Michael Scherer'} +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000383-bf00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO Creating alias for email: misc@redhat.com +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO Updating now resolved email misc@redhat.com +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 072a73e4dbcd71983f14efe222984f88a8f6f03b +2023-01-12 12:25:48 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/072a73e4dbcd71983f14efe222984f88a8f6f03b +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 73853439+preeti-14-7@users.noreply.github.com +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010466e9-ff00-0000-0000-000000000000'), 'cntrb_login': 'preeti-14-7', 'cntrb_created_at': '2020-11-02T20:10:33Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'WEST BENGAL ', 'cntrb_canonical': '73853439+preeti-14-7@users.noreply.github.com', 'gh_user_id': 73853439, 'gh_login': 'preeti-14-7', 'gh_url': 'https://api.github.com/users/preeti-14-7', 'gh_html_url': 'https://github.com/preeti-14-7', 'gh_node_id': 'MDQ6VXNlcjczODUzNDM5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/73853439?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/preeti-14-7/followers', 'gh_following_url': 'https://api.github.com/users/preeti-14-7/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/preeti-14-7/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/preeti-14-7/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/preeti-14-7/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/preeti-14-7/orgs', 'gh_repos_url': 'https://api.github.com/users/preeti-14-7/repos', 'gh_events_url': 'https://api.github.com/users/preeti-14-7/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/preeti-14-7/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': 
'2022-12-30T12:58:38Z', 'cntrb_full_name': 'Preeti Yadav'} +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO cntrb_id 010466e9-ff00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 73853439+preeti-14-7@users.noreply.github.com +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 73853439+preeti-14-7@users.noreply.github.com +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 073f61dcee84b5a9ea660147b6c5f876387e97bd +2023-01-12 12:25:49 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/073f61dcee84b5a9ea660147b6c5f876387e97bd +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 55105024+adammenker@users.noreply.github.com +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010348d6-0000-0000-0000-000000000000'), 'cntrb_login': 'adammenker', 'cntrb_created_at': '2019-09-09T17:40:06Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '55105024+adammenker@users.noreply.github.com', 'gh_user_id': 55105024, 'gh_login': 'adammenker', 'gh_url': 'https://api.github.com/users/adammenker', 'gh_html_url': 'https://github.com/adammenker', 'gh_node_id': 'MDQ6VXNlcjU1MTA1MDI0', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/55105024?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/adammenker/followers', 'gh_following_url': 'https://api.github.com/users/adammenker/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/adammenker/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/adammenker/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/adammenker/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/adammenker/orgs', 'gh_repos_url': 'https://api.github.com/users/adammenker/repos', 'gh_events_url': 'https://api.github.com/users/adammenker/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/adammenker/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T18:53:26Z', 'cntrb_full_name': 'adammenker'} +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO cntrb_id 010348d6-0000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 55105024+adammenker@users.noreply.github.com +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 55105024+adammenker@users.noreply.github.com +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: goggins@missouri.edu +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010005cb-c700-0000-0000-000000000000'), 'cntrb_login': 'sgoggins', 'cntrb_created_at': '2010-08-29T16:25:48Z', 'cntrb_email': 's@goggins.com', 'cntrb_company': 'University of Missouri & Linux Foundation CHAOSS Working Group', 'cntrb_location': 'Columbia, MO', 
'cntrb_canonical': 's@goggins.com', 'gh_user_id': 379847, 'gh_login': 'sgoggins', 'gh_url': 'https://api.github.com/users/sgoggins', 'gh_html_url': 'https://github.com/sgoggins', 'gh_node_id': 'MDQ6VXNlcjM3OTg0Nw==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/379847?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sgoggins/followers', 'gh_following_url': 'https://api.github.com/users/sgoggins/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sgoggins/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sgoggins/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sgoggins/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sgoggins/orgs', 'gh_repos_url': 'https://api.github.com/users/sgoggins/repos', 'gh_events_url': 'https://api.github.com/users/sgoggins/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sgoggins/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T18:26:13Z', 'cntrb_full_name': 'Sean P. Goggins'} +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO cntrb_id 010005cb-c700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Creating alias for email: goggins@missouri.edu +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Updating now resolved email goggins@missouri.edu +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 07c229206f237907158f28ed57c772a7b9407788 +2023-01-12 12:25:50 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/07c229206f237907158f28ed57c772a7b9407788 +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: nodira.ibrogimova@gmail.com +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100ac4c-c000-0000-0000-000000000000'), 'cntrb_login': 'NodiraIbrogimova', 'cntrb_created_at': '2015-03-03T10:25:14Z', 'cntrb_email': None, 'cntrb_company': 'Self-employed', 'cntrb_location': 'Earth', 'cntrb_canonical': 'nodira.ibrogimova@gmail.com', 'gh_user_id': 11291840, 'gh_login': 'NodiraIbrogimova', 'gh_url': 'https://api.github.com/users/NodiraIbrogimova', 'gh_html_url': 'https://github.com/NodiraIbrogimova', 'gh_node_id': 'MDQ6VXNlcjExMjkxODQw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/11291840?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/NodiraIbrogimova/followers', 'gh_following_url': 'https://api.github.com/users/NodiraIbrogimova/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/NodiraIbrogimova/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/NodiraIbrogimova/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/NodiraIbrogimova/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/NodiraIbrogimova/orgs', 'gh_repos_url': 'https://api.github.com/users/NodiraIbrogimova/repos', 'gh_events_url': 'https://api.github.com/users/NodiraIbrogimova/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/NodiraIbrogimova/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-21T11:01:59Z', 
'cntrb_full_name': 'Nodira Ibrogimova'} +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100ac4c-c000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Creating alias for email: nodira.ibrogimova@gmail.com +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Updating now resolved email nodira.ibrogimova@gmail.com +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 07e839c6a2cc21d1aee7a95e212d72381dbda558 +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/07e839c6a2cc21d1aee7a95e212d72381dbda558 +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:25:51 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Alexandre Courouble', 'hash': '07e839c6a2cc21d1aee7a95e212d72381dbda558', 'email_raw': 'acourouble@vmware.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=acourouble@vmware.com+in:email+type:user +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from acourouble@vmware.com +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'acourouble@vmware.com', 'name': 'Alexandre Courouble'} +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO Search query did not return any results, adding commit's table remains null... +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! 
+2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 092f3ac3bdb5548c11d6f265af35c8ea325b2920 +2023-01-12 12:25:52 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/092f3ac3bdb5548c11d6f265af35c8ea325b2920 +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: stuart.aldrich.1@gmail.com +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102b39d-7200-0000-0000-000000000000'), 'cntrb_login': 'sta97', 'cntrb_created_at': '2018-11-25T05:41:51Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'stuart.aldrich.1@gmail.com', 'gh_user_id': 45325682, 'gh_login': 'sta97', 'gh_url': 'https://api.github.com/users/sta97', 'gh_html_url': 'https://github.com/sta97', 'gh_node_id': 'MDQ6VXNlcjQ1MzI1Njgy', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/45325682?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sta97/followers', 'gh_following_url': 'https://api.github.com/users/sta97/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sta97/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sta97/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sta97/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sta97/orgs', 'gh_repos_url': 'https://api.github.com/users/sta97/repos', 'gh_events_url': 'https://api.github.com/users/sta97/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sta97/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-26T12:48:12Z', 'cntrb_full_name': 'StuartA'} +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102b39d-7200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO Creating alias for email: stuart.aldrich.1@gmail.com +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO Updating now resolved email stuart.aldrich.1@gmail.com +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 09e6dc4496ad23355ee8d15fbe365595b4e9c57a +2023-01-12 12:25:53 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/09e6dc4496ad23355ee8d15fbe365595b4e9c57a +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: uniquep201@gmail.com +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102830a-c500-0000-0000-000000000000'), 'cntrb_login': 'Preshh0', 'cntrb_created_at': '2018-08-06T11:56:23Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Enugu, Nigeria.', 'cntrb_canonical': 'uniquep201@gmail.com', 'gh_user_id': 42142405, 'gh_login': 'Preshh0', 'gh_url': 'https://api.github.com/users/Preshh0', 'gh_html_url': 'https://github.com/Preshh0', 'gh_node_id': 'MDQ6VXNlcjQyMTQyNDA1', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/42142405?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Preshh0/followers', 'gh_following_url': 'https://api.github.com/users/Preshh0/following{/other_user}', 'gh_gists_url': 
'https://api.github.com/users/Preshh0/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Preshh0/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Preshh0/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Preshh0/orgs', 'gh_repos_url': 'https://api.github.com/users/Preshh0/repos', 'gh_events_url': 'https://api.github.com/users/Preshh0/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Preshh0/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-01T20:11:31Z', 'cntrb_full_name': 'Precious Onyewuchi'} +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102830a-c500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Creating alias for email: uniquep201@gmail.com +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Updating now resolved email uniquep201@gmail.com +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0ab24f8f712487e757ff2069aeb7773eada27241 +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0ab24f8f712487e757ff2069aeb7773eada27241 +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:25:54 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'JamesDonovan1', 'hash': '0ab24f8f712487e757ff2069aeb7773eada27241', 'email_raw': 'jmdm4r@umsystem.edu', 'resolution_status': 'not_unresolved'} +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=jmdm4r@umsystem.edu+in:email+type:user +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from jmdm4r@umsystem.edu +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'jmdm4r@umsystem.edu', 'name': 'JamesDonovan1'} +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! 
+2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0b6820c5b1b27974284c614a57a967822f4abaea +2023-01-12 12:25:55 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0b6820c5b1b27974284c614a57a967822f4abaea +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 59929366+arausio@users.noreply.github.com +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01039273-1600-0000-0000-000000000000'), 'cntrb_login': 'arausio', 'cntrb_created_at': '2020-01-15T16:40:24Z', 'cntrb_email': None, 'cntrb_company': '@chaoss @augurlabs', 'cntrb_location': 'The South Pole', 'cntrb_canonical': '59929366+arausio@users.noreply.github.com', 'gh_user_id': 59929366, 'gh_login': 'arausio', 'gh_url': 'https://api.github.com/users/arausio', 'gh_html_url': 'https://github.com/arausio', 'gh_node_id': 'MDQ6VXNlcjU5OTI5MzY2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/59929366?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/arausio/followers', 'gh_following_url': 'https://api.github.com/users/arausio/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/arausio/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/arausio/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/arausio/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/arausio/orgs', 'gh_repos_url': 'https://api.github.com/users/arausio/repos', 'gh_events_url': 'https://api.github.com/users/arausio/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/arausio/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-11-20T16:38:50Z', 'cntrb_full_name': 'pingu!'} +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO cntrb_id 01039273-1600-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 59929366+arausio@users.noreply.github.com +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 59929366+arausio@users.noreply.github.com +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0bca990fa4f3da3525849eb37b9e1801c143fbab +2023-01-12 12:25:56 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0bca990fa4f3da3525849eb37b9e1801c143fbab +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 43684300+pratikmishra356@users.noreply.github.com +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01029a91-cc00-0000-0000-000000000000'), 'cntrb_login': 'pratikmishra356', 'cntrb_created_at': '2018-09-28T18:55:36Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'KOLKATA', 'cntrb_canonical': '43684300+pratikmishra356@users.noreply.github.com', 'gh_user_id': 43684300, 'gh_login': 'pratikmishra356', 'gh_url': 'https://api.github.com/users/pratikmishra356', 'gh_html_url': 'https://github.com/pratikmishra356', 'gh_node_id': 'MDQ6VXNlcjQzNjg0MzAw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/43684300?v=4', 
'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/pratikmishra356/followers', 'gh_following_url': 'https://api.github.com/users/pratikmishra356/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/pratikmishra356/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/pratikmishra356/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/pratikmishra356/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/pratikmishra356/orgs', 'gh_repos_url': 'https://api.github.com/users/pratikmishra356/repos', 'gh_events_url': 'https://api.github.com/users/pratikmishra356/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/pratikmishra356/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T13:00:55Z', 'cntrb_full_name': 'PRATIK MISHRA'} +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO cntrb_id 01029a91-cc00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 43684300+pratikmishra356@users.noreply.github.com +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 43684300+pratikmishra356@users.noreply.github.com +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0c4ad489b886a531972954472e2306573db341b2 +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0c4ad489b886a531972954472e2306573db341b2 +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:25:57 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Michael Woodruff', 'hash': '0c4ad489b886a531972954472e2306573db341b2', 'email_raw': 'michaelwoodruff@Michaels-MacBook-Pro.local', 'resolution_status': 'not_unresolved'} +2023-01-12 12:25:58 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=michaelwoodruff@Michaels-MacBook-Pro.local+in:email+type:user +2023-01-12 12:25:58 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from michaelwoodruff@Michaels-MacBook-Pro.local +2023-01-12 12:25:58 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'michaelwoodruff@Michaels-MacBook-Pro.local', 'name': 'Michael Woodruff'} +2023-01-12 12:25:58 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... 
+2023-01-12 12:25:58 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'woodruff', 'id': 8599418, 'node_id': 'MDQ6VXNlcjg1OTk0MTg=', 'avatar_url': 'https://avatars.githubusercontent.com/u/8599418?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/woodruff', 'html_url': 'https://github.com/woodruff', 'followers_url': 'https://api.github.com/users/woodruff/followers', 'following_url': 'https://api.github.com/users/woodruff/following{/other_user}', 'gists_url': 'https://api.github.com/users/woodruff/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/woodruff/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/woodruff/subscriptions', 'organizations_url': 'https://api.github.com/users/woodruff/orgs', 'repos_url': 'https://api.github.com/users/woodruff/repos', 'events_url': 'https://api.github.com/users/woodruff/events{/privacy}', 'received_events_url': 'https://api.github.com/users/woodruff/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: michaelwoodruff@Michaels-MacBook-Pro.local +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01008337-7a00-0000-0000-000000000000'), 'cntrb_login': 'woodruff', 'cntrb_created_at': '2014-08-30T15:05:47Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'United States', 'cntrb_canonical': 'michaelwoodruff@Michaels-MacBook-Pro.local', 'gh_user_id': 8599418, 'gh_login': 'woodruff', 'gh_url': 'https://api.github.com/users/woodruff', 'gh_html_url': 'https://github.com/woodruff', 'gh_node_id': 'MDQ6VXNlcjg1OTk0MTg=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8599418?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/woodruff/followers', 'gh_following_url': 'https://api.github.com/users/woodruff/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/woodruff/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/woodruff/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/woodruff/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/woodruff/orgs', 'gh_repos_url': 'https://api.github.com/users/woodruff/repos', 'gh_events_url': 'https://api.github.com/users/woodruff/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/woodruff/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2016-02-27T18:47:24Z', 'cntrb_full_name': 'Michael Woodruff'} +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO cntrb_id 01008337-7a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO Creating alias for email: michaelwoodruff@Michaels-MacBook-Pro.local +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO Updating now resolved email michaelwoodruff@Michaels-MacBook-Pro.local +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0c83e4cca5e9b28c07d1674882401da46c363e47 +2023-01-12 12:25:59 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0c83e4cca5e9b28c07d1674882401da46c363e47 +2023-01-12 12:26:00 
blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 42142405+Preshh0@users.noreply.github.com +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102830a-c500-0000-0000-000000000000'), 'cntrb_login': 'Preshh0', 'cntrb_created_at': '2018-08-06T11:56:23Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Enugu, Nigeria.', 'cntrb_canonical': '42142405+Preshh0@users.noreply.github.com', 'gh_user_id': 42142405, 'gh_login': 'Preshh0', 'gh_url': 'https://api.github.com/users/Preshh0', 'gh_html_url': 'https://github.com/Preshh0', 'gh_node_id': 'MDQ6VXNlcjQyMTQyNDA1', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/42142405?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Preshh0/followers', 'gh_following_url': 'https://api.github.com/users/Preshh0/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Preshh0/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Preshh0/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Preshh0/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Preshh0/orgs', 'gh_repos_url': 'https://api.github.com/users/Preshh0/repos', 'gh_events_url': 'https://api.github.com/users/Preshh0/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Preshh0/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-01T20:11:31Z', 'cntrb_full_name': 'Precious Onyewuchi'} +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102830a-c500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 42142405+Preshh0@users.noreply.github.com +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 42142405+Preshh0@users.noreply.github.com +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0d64909e8f9efedfcbeec29ed21ee05bce8c61e0 +2023-01-12 12:26:00 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0d64909e8f9efedfcbeec29ed21ee05bce8c61e0 +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: nichols.keanu9@gmail.com +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101a73e-8100-0000-0000-000000000000'), 'cntrb_login': 'kmn5409', 'cntrb_created_at': '2017-04-18T18:00:51Z', 'cntrb_email': 'nichols.keanu9@gmail.com', 'cntrb_company': None, 'cntrb_location': 'Trinidad and Tobago', 'cntrb_canonical': 'nichols.keanu9@gmail.com', 'gh_user_id': 27737729, 'gh_login': 'kmn5409', 'gh_url': 'https://api.github.com/users/kmn5409', 'gh_html_url': 'https://github.com/kmn5409', 'gh_node_id': 'MDQ6VXNlcjI3NzM3NzI5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/27737729?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/kmn5409/followers', 'gh_following_url': 'https://api.github.com/users/kmn5409/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/kmn5409/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/kmn5409/starred{/owner}{/repo}', 'gh_subscriptions_url': 
'https://api.github.com/users/kmn5409/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/kmn5409/orgs', 'gh_repos_url': 'https://api.github.com/users/kmn5409/repos', 'gh_events_url': 'https://api.github.com/users/kmn5409/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/kmn5409/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T15:20:11Z', 'cntrb_full_name': 'Keanu Nichols'} +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101a73e-8100-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Creating alias for email: nichols.keanu9@gmail.com +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Updating now resolved email nichols.keanu9@gmail.com +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0ee2298b86bd47b931fc3bee834d588461061764 +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0ee2298b86bd47b931fc3bee834d588461061764 +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 83814427+flyagaricdev@users.noreply.github.com +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0104fee8-1b00-0000-0000-000000000000'), 'cntrb_login': 'flyagaricdev', 'cntrb_created_at': '2021-05-07T09:39:31Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '83814427+flyagaricdev@users.noreply.github.com', 'gh_user_id': 83814427, 'gh_login': 'flyagaricdev', 'gh_url': 'https://api.github.com/users/flyagaricdev', 'gh_html_url': 'https://github.com/flyagaricdev', 'gh_node_id': 'MDQ6VXNlcjgzODE0NDI3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/83814427?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/flyagaricdev/followers', 'gh_following_url': 'https://api.github.com/users/flyagaricdev/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/flyagaricdev/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/flyagaricdev/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/flyagaricdev/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/flyagaricdev/orgs', 'gh_repos_url': 'https://api.github.com/users/flyagaricdev/repos', 'gh_events_url': 'https://api.github.com/users/flyagaricdev/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/flyagaricdev/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-06-08T12:15:06Z', 'cntrb_full_name': 'Agustina Milozzi'} +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO cntrb_id 0104fee8-1b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 83814427+flyagaricdev@users.noreply.github.com +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:01 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 83814427+flyagaricdev@users.noreply.github.com +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from 
github for email: michaelwoodruff@mwc-021001.dhcp.missouri.edu +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01008337-7a00-0000-0000-000000000000'), 'cntrb_login': 'woodruff', 'cntrb_created_at': '2014-08-30T15:05:47Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'United States', 'cntrb_canonical': 'michaelwoodruff@mwc-021001.dhcp.missouri.edu', 'gh_user_id': 8599418, 'gh_login': 'woodruff', 'gh_url': 'https://api.github.com/users/woodruff', 'gh_html_url': 'https://github.com/woodruff', 'gh_node_id': 'MDQ6VXNlcjg1OTk0MTg=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8599418?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/woodruff/followers', 'gh_following_url': 'https://api.github.com/users/woodruff/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/woodruff/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/woodruff/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/woodruff/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/woodruff/orgs', 'gh_repos_url': 'https://api.github.com/users/woodruff/repos', 'gh_events_url': 'https://api.github.com/users/woodruff/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/woodruff/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2016-02-27T18:47:24Z', 'cntrb_full_name': 'Michael Woodruff'} +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO cntrb_id 01008337-7a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO Creating alias for email: michaelwoodruff@mwc-021001.dhcp.missouri.edu +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO Updating now resolved email michaelwoodruff@mwc-021001.dhcp.missouri.edu +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 0faca7c35131faee503ceeaf892547e80d699208 +2023-01-12 12:26:02 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/0faca7c35131faee503ceeaf892547e80d699208 +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: maximilian.huber@tngtech.com +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100121c-ea00-0000-0000-000000000000'), 'cntrb_login': 'maxhbr', 'cntrb_created_at': '2011-11-10T20:01:46Z', 'cntrb_email': 'gh@maxhbr.de', 'cntrb_company': '@TNG Technology Consulting GmbH', 'cntrb_location': 'Germany', 'cntrb_canonical': 'gh@maxhbr.de', 'gh_user_id': 1187050, 'gh_login': 'maxhbr', 'gh_url': 'https://api.github.com/users/maxhbr', 'gh_html_url': 'https://github.com/maxhbr', 'gh_node_id': 'MDQ6VXNlcjExODcwNTA=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/1187050?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/maxhbr/followers', 'gh_following_url': 'https://api.github.com/users/maxhbr/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/maxhbr/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/maxhbr/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/maxhbr/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/maxhbr/orgs', 
'gh_repos_url': 'https://api.github.com/users/maxhbr/repos', 'gh_events_url': 'https://api.github.com/users/maxhbr/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/maxhbr/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-14T08:20:50Z', 'cntrb_full_name': 'Maximilian Huber'} +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100121c-ea00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Creating alias for email: maximilian.huber@tngtech.com +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Updating now resolved email maximilian.huber@tngtech.com +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 31522931+ccarterlandis@users.noreply.github.com +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101e100-7300-0000-0000-000000000000'), 'cntrb_login': 'ccarterlandis', 'cntrb_created_at': '2017-08-31T22:01:37Z', 'cntrb_email': 'c@carterlandis.com', 'cntrb_company': '@Gusto', 'cntrb_location': None, 'cntrb_canonical': 'c@carterlandis.com', 'gh_user_id': 31522931, 'gh_login': 'ccarterlandis', 'gh_url': 'https://api.github.com/users/ccarterlandis', 'gh_html_url': 'https://github.com/ccarterlandis', 'gh_node_id': 'MDQ6VXNlcjMxNTIyOTMx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/31522931?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ccarterlandis/followers', 'gh_following_url': 'https://api.github.com/users/ccarterlandis/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ccarterlandis/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ccarterlandis/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ccarterlandis/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ccarterlandis/orgs', 'gh_repos_url': 'https://api.github.com/users/ccarterlandis/repos', 'gh_events_url': 'https://api.github.com/users/ccarterlandis/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ccarterlandis/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-05T17:00:36Z', 'cntrb_full_name': 'Carter Landis'} +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101e100-7300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 31522931+ccarterlandis@users.noreply.github.com +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 31522931+ccarterlandis@users.noreply.github.com +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 10e7d336e754a67d251d4a3c196317aec7340d82 +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/10e7d336e754a67d251d4a3c196317aec7340d82 +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:03 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'sean', 
'hash': '10e7d336e754a67d251d4a3c196317aec7340d82', 'email_raw': 'Doctor Sean P. Goggins, I', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=Doctor Sean P. Goggins, I+in:email+type:user +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from Doctor Sean P. Goggins, I +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'Doctor Sean P. Goggins, I', 'name': 'sean'} +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 116c3d3b47e30ef9425ddaa775c27eb02624d0c1 +2023-01-12 12:26:04 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/116c3d3b47e30ef9425ddaa775c27eb02624d0c1 +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: lylynaheng@users.noreply.github.com +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01007b5e-cc00-0000-0000-000000000000'), 'cntrb_login': 'lylynaheng', 'cntrb_created_at': '2014-07-07T02:41:13Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Bay Area, CA', 'cntrb_canonical': 'lylynaheng@users.noreply.github.com', 'gh_user_id': 8085196, 'gh_login': 'lylynaheng', 'gh_url': 'https://api.github.com/users/lylynaheng', 'gh_html_url': 'https://github.com/lylynaheng', 'gh_node_id': 'MDQ6VXNlcjgwODUxOTY=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8085196?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/lylynaheng/followers', 'gh_following_url': 'https://api.github.com/users/lylynaheng/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/lylynaheng/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/lylynaheng/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/lylynaheng/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/lylynaheng/orgs', 'gh_repos_url': 'https://api.github.com/users/lylynaheng/repos', 'gh_events_url': 'https://api.github.com/users/lylynaheng/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/lylynaheng/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T01:04:13Z', 'cntrb_full_name': 'Lylyna Heng '} +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO cntrb_id 01007b5e-cc00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO Creating alias for email: lylynaheng@users.noreply.github.com +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO Updating now resolved email lylynaheng@users.noreply.github.com +2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 11bba2096e122398546b459191b6d252d06477c9 
+2023-01-12 12:26:05 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/11bba2096e122398546b459191b6d252d06477c9 +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: maximumbalk@gmail.com +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01012a29-7c00-0000-0000-000000000000'), 'cntrb_login': 'maxbalk', 'cntrb_created_at': '2016-05-23T21:32:04Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'maximumbalk@gmail.com', 'gh_user_id': 19540348, 'gh_login': 'maxbalk', 'gh_url': 'https://api.github.com/users/maxbalk', 'gh_html_url': 'https://github.com/maxbalk', 'gh_node_id': 'MDQ6VXNlcjE5NTQwMzQ4', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/19540348?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/maxbalk/followers', 'gh_following_url': 'https://api.github.com/users/maxbalk/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/maxbalk/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/maxbalk/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/maxbalk/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/maxbalk/orgs', 'gh_repos_url': 'https://api.github.com/users/maxbalk/repos', 'gh_events_url': 'https://api.github.com/users/maxbalk/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/maxbalk/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-08T17:18:42Z', 'cntrb_full_name': 'Max'} +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO cntrb_id 01012a29-7c00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO Creating alias for email: maximumbalk@gmail.com +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO Updating now resolved email maximumbalk@gmail.com +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 11f218fd9c0902b952ed292beef85d8a0c749cd1 +2023-01-12 12:26:06 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/11f218fd9c0902b952ed292beef85d8a0c749cd1 +2023-01-12 12:26:07 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:07 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Pratik Mishra', 'hash': '11f218fd9c0902b952ed292beef85d8a0c749cd1', 'email_raw': 'pratikmishra@Pratiks-MacBook-Air.local', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:07 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=pratikmishra@Pratiks-MacBook-Air.local+in:email+type:user +2023-01-12 12:26:07 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from pratikmishra@Pratiks-MacBook-Air.local +2023-01-12 12:26:07 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'pratikmishra@Pratiks-MacBook-Air.local', 'name': 'Pratik Mishra'} +2023-01-12 12:26:07 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... 
+2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'DolceParadise', 'id': 74451989, 'node_id': 'MDQ6VXNlcjc0NDUxOTg5', 'avatar_url': 'https://avatars.githubusercontent.com/u/74451989?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/DolceParadise', 'html_url': 'https://github.com/DolceParadise', 'followers_url': 'https://api.github.com/users/DolceParadise/followers', 'following_url': 'https://api.github.com/users/DolceParadise/following{/other_user}', 'gists_url': 'https://api.github.com/users/DolceParadise/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/DolceParadise/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/DolceParadise/subscriptions', 'organizations_url': 'https://api.github.com/users/DolceParadise/orgs', 'repos_url': 'https://api.github.com/users/DolceParadise/repos', 'events_url': 'https://api.github.com/users/DolceParadise/events{/privacy}', 'received_events_url': 'https://api.github.com/users/DolceParadise/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: pratikmishra@Pratiks-MacBook-Air.local +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0104700c-1500-0000-0000-000000000000'), 'cntrb_login': 'DolceParadise', 'cntrb_created_at': '2020-11-14T10:16:22Z', 'cntrb_email': None, 'cntrb_company': 'Indian Institute of Technology (IIT-BHU)', 'cntrb_location': 'Mumbai', 'cntrb_canonical': 'pratikmishra@Pratiks-MacBook-Air.local', 'gh_user_id': 74451989, 'gh_login': 'DolceParadise', 'gh_url': 'https://api.github.com/users/DolceParadise', 'gh_html_url': 'https://github.com/DolceParadise', 'gh_node_id': 'MDQ6VXNlcjc0NDUxOTg5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/74451989?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/DolceParadise/followers', 'gh_following_url': 'https://api.github.com/users/DolceParadise/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/DolceParadise/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/DolceParadise/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/DolceParadise/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/DolceParadise/orgs', 'gh_repos_url': 'https://api.github.com/users/DolceParadise/repos', 'gh_events_url': 'https://api.github.com/users/DolceParadise/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/DolceParadise/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-10T13:29:33Z', 'cntrb_full_name': 'Pratik Mishra'} +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO cntrb_id 0104700c-1500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO Creating alias for email: pratikmishra@Pratiks-MacBook-Air.local +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO Updating now resolved email pratikmishra@Pratiks-MacBook-Air.local +2023-01-12 12:26:08 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1225d2a23a9706a0e5ae089400c8dd64ba82ced5 +2023-01-12 12:26:08 blueberry 
insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1225d2a23a9706a0e5ae089400c8dd64ba82ced5 +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: crwilcox57@gmail.com +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01050405-4300-0000-0000-000000000000'), 'cntrb_login': 'bglob', 'cntrb_created_at': '2021-05-13T18:04:57Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'crwilcox57@gmail.com', 'gh_user_id': 84149571, 'gh_login': 'bglob', 'gh_url': 'https://api.github.com/users/bglob', 'gh_html_url': 'https://github.com/bglob', 'gh_node_id': 'MDQ6VXNlcjg0MTQ5NTcx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/84149571?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/bglob/followers', 'gh_following_url': 'https://api.github.com/users/bglob/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/bglob/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/bglob/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/bglob/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/bglob/orgs', 'gh_repos_url': 'https://api.github.com/users/bglob/repos', 'gh_events_url': 'https://api.github.com/users/bglob/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/bglob/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-23T14:00:05Z', 'cntrb_full_name': 'Caleb Wilcox'} +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO cntrb_id 01050405-4300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO Creating alias for email: crwilcox57@gmail.com +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO Updating now resolved email crwilcox57@gmail.com +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 129f061ed582ef1552a52503695234e2906f9808 +2023-01-12 12:26:09 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/129f061ed582ef1552a52503695234e2906f9808 +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 31676518+tretrue@users.noreply.github.com +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101e358-6600-0000-0000-000000000000'), 'cntrb_login': 'tretrue', 'cntrb_created_at': '2017-09-06T00:45:29Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'University of Missouri - Columbia', 'cntrb_canonical': '31676518+tretrue@users.noreply.github.com', 'gh_user_id': 31676518, 'gh_login': 'tretrue', 'gh_url': 'https://api.github.com/users/tretrue', 'gh_html_url': 'https://github.com/tretrue', 'gh_node_id': 'MDQ6VXNlcjMxNjc2NTE4', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/31676518?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/tretrue/followers', 'gh_following_url': 'https://api.github.com/users/tretrue/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/tretrue/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/tretrue/starred{/owner}{/repo}', 
'gh_subscriptions_url': 'https://api.github.com/users/tretrue/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/tretrue/orgs', 'gh_repos_url': 'https://api.github.com/users/tretrue/repos', 'gh_events_url': 'https://api.github.com/users/tretrue/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/tretrue/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-06-12T23:30:05Z', 'cntrb_full_name': 'Robert Lincoln Truesdale III'} +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101e358-6600-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 31676518+tretrue@users.noreply.github.com +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 31676518+tretrue@users.noreply.github.com +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 12bd7a82ddbc6705d7eb0e2aafa86087d3cd311f +2023-01-12 12:26:10 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/12bd7a82ddbc6705d7eb0e2aafa86087d3cd311f +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: ortonpaul18@gmail.com +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01029a37-3300-0000-0000-000000000000'), 'cntrb_login': 'ortonpaul', 'cntrb_created_at': '2018-09-28T02:34:03Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'ortonpaul18@gmail.com', 'gh_user_id': 43661107, 'gh_login': 'ortonpaul', 'gh_url': 'https://api.github.com/users/ortonpaul', 'gh_html_url': 'https://github.com/ortonpaul', 'gh_node_id': 'MDQ6VXNlcjQzNjYxMTA3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/43661107?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ortonpaul/followers', 'gh_following_url': 'https://api.github.com/users/ortonpaul/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ortonpaul/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ortonpaul/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ortonpaul/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ortonpaul/orgs', 'gh_repos_url': 'https://api.github.com/users/ortonpaul/repos', 'gh_events_url': 'https://api.github.com/users/ortonpaul/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ortonpaul/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-10-27T14:29:40Z', 'cntrb_full_name': 'Paul Orton'} +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO cntrb_id 01029a37-3300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO Creating alias for email: ortonpaul18@gmail.com +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO Updating now resolved email ortonpaul18@gmail.com +2023-01-12 12:26:11 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 14ae240ed19613a6d1ca532a9ebbf3c22d24066d +2023-01-12 12:26:11 
blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/14ae240ed19613a6d1ca532a9ebbf3c22d24066d +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: bhandari2003anurag@gmail.com +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0105777b-d900-0000-0000-000000000000'), 'cntrb_login': 'WhiteWolf47', 'cntrb_created_at': '2021-10-01T05:23:32Z', 'cntrb_email': 'bhandari2003anurag@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'bhandari2003anurag@gmail.com', 'gh_user_id': 91716569, 'gh_login': 'WhiteWolf47', 'gh_url': 'https://api.github.com/users/WhiteWolf47', 'gh_html_url': 'https://github.com/WhiteWolf47', 'gh_node_id': 'U_kgDOBXd72Q', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/91716569?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/WhiteWolf47/followers', 'gh_following_url': 'https://api.github.com/users/WhiteWolf47/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/WhiteWolf47/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/WhiteWolf47/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/WhiteWolf47/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/WhiteWolf47/orgs', 'gh_repos_url': 'https://api.github.com/users/WhiteWolf47/repos', 'gh_events_url': 'https://api.github.com/users/WhiteWolf47/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/WhiteWolf47/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-01T14:43:44Z', 'cntrb_full_name': 'ANURAG BHANDARI'} +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO cntrb_id 0105777b-d900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO Creating alias for email: bhandari2003anurag@gmail.com +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO Updating now resolved email bhandari2003anurag@gmail.com +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 15709e1d7d14e7224ba8e8eca933b0d1db76899d +2023-01-12 12:26:12 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/15709e1d7d14e7224ba8e8eca933b0d1db76899d +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 67657066+asaayushisingh@users.noreply.github.com +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0104085d-6a00-0000-0000-000000000000'), 'cntrb_login': 'singhavs', 'cntrb_created_at': '2020-06-30T19:26:37Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Bangalore', 'cntrb_canonical': '67657066+asaayushisingh@users.noreply.github.com', 'gh_user_id': 67657066, 'gh_login': 'singhavs', 'gh_url': 'https://api.github.com/users/singhavs', 'gh_html_url': 'https://github.com/singhavs', 'gh_node_id': 'MDQ6VXNlcjY3NjU3MDY2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/67657066?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/singhavs/followers', 'gh_following_url': 'https://api.github.com/users/singhavs/following{/other_user}', 'gh_gists_url': 
'https://api.github.com/users/singhavs/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/singhavs/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/singhavs/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/singhavs/orgs', 'gh_repos_url': 'https://api.github.com/users/singhavs/repos', 'gh_events_url': 'https://api.github.com/users/singhavs/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/singhavs/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T06:13:35Z', 'cntrb_full_name': 'singhavs'} +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO cntrb_id 0104085d-6a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 67657066+asaayushisingh@users.noreply.github.com +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 67657066+asaayushisingh@users.noreply.github.com +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 15a9be06ebaab5e070e45e3ec6dc9b3d99290a27 +2023-01-12 12:26:13 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/15a9be06ebaab5e070e45e3ec6dc9b3d99290a27 +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: ajeurkar@proximabiz.com +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010321f6-8000-0000-0000-000000000000'), 'cntrb_login': 'ajeurkar', 'cntrb_created_at': '2019-07-05T04:49:32Z', 'cntrb_email': None, 'cntrb_company': 'Proxima Solutions', 'cntrb_location': 'Pune', 'cntrb_canonical': 'ajeurkar@proximabiz.com', 'gh_user_id': 52557440, 'gh_login': 'ajeurkar', 'gh_url': 'https://api.github.com/users/ajeurkar', 'gh_html_url': 'https://github.com/ajeurkar', 'gh_node_id': 'MDQ6VXNlcjUyNTU3NDQw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/52557440?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ajeurkar/followers', 'gh_following_url': 'https://api.github.com/users/ajeurkar/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ajeurkar/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ajeurkar/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ajeurkar/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ajeurkar/orgs', 'gh_repos_url': 'https://api.github.com/users/ajeurkar/repos', 'gh_events_url': 'https://api.github.com/users/ajeurkar/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ajeurkar/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-02T07:30:11Z', 'cntrb_full_name': 'Ajinkya Jeurkar'} +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO cntrb_id 010321f6-8000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Creating alias for email: ajeurkar@proximabiz.com +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Updating now resolved email ajeurkar@proximabiz.com +2023-01-12 12:26:14 
blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 15ed2b4852fb29fd03ec1c6a7c691bc29bff678d +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/15ed2b4852fb29fd03ec1c6a7c691bc29bff678d +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 61888364+sg7801@users.noreply.github.com +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0103b057-6c00-0000-0000-000000000000'), 'cntrb_login': 'sg7801', 'cntrb_created_at': '2020-03-06T20:25:14Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '61888364+sg7801@users.noreply.github.com', 'gh_user_id': 61888364, 'gh_login': 'sg7801', 'gh_url': 'https://api.github.com/users/sg7801', 'gh_html_url': 'https://github.com/sg7801', 'gh_node_id': 'MDQ6VXNlcjYxODg4MzY0', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/61888364?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sg7801/followers', 'gh_following_url': 'https://api.github.com/users/sg7801/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sg7801/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sg7801/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sg7801/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sg7801/orgs', 'gh_repos_url': 'https://api.github.com/users/sg7801/repos', 'gh_events_url': 'https://api.github.com/users/sg7801/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sg7801/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-05T12:09:29Z', 'cntrb_full_name': 'Srishti Guleria'} +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO cntrb_id 0103b057-6c00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 61888364+sg7801@users.noreply.github.com +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 61888364+sg7801@users.noreply.github.com +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 161f73934c1123e55f34ca97779ba584706f4ccb +2023-01-12 12:26:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/161f73934c1123e55f34ca97779ba584706f4ccb +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: rooby786@yahoo.com +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0105ceb9-1100-0000-0000-000000000000'), 'cntrb_login': 'rooby786', 'cntrb_created_at': '2022-01-10T08:07:54Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'rooby786@yahoo.com', 'gh_user_id': 97433873, 'gh_login': 'rooby786', 'gh_url': 'https://api.github.com/users/rooby786', 'gh_html_url': 'https://github.com/rooby786', 'gh_node_id': 'U_kgDOBc65EQ', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/97433873?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/rooby786/followers', 'gh_following_url': 
'https://api.github.com/users/rooby786/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/rooby786/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/rooby786/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/rooby786/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/rooby786/orgs', 'gh_repos_url': 'https://api.github.com/users/rooby786/repos', 'gh_events_url': 'https://api.github.com/users/rooby786/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/rooby786/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-04-10T10:20:50Z', 'cntrb_full_name': 'Robeena Anwar'} +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 0105ceb9-1100-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: rooby786@yahoo.com +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email rooby786@yahoo.com +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 168dad1ba8c36615173c5395c881f3bfe38b328a +2023-01-12 12:26:15 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/168dad1ba8c36615173c5395c881f3bfe38b328a +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Eunice Cheng', 'hash': '168dad1ba8c36615173c5395c881f3bfe38b328a', 'email_raw': 'eunicheng@gmail.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=eunicheng@gmail.com+in:email+type:user +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from eunicheng@gmail.com +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'eunicheng@gmail.com', 'name': 'Eunice Cheng'} +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Search query did not return any results, adding commit's table remains null... +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! 
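The run above traces the contributor-resolution fallback that insert_facade_contributors logs for each commit email: fetch the commit from the GitHub commits endpoint and take the author login if GitHub attached one, otherwise search users by email, otherwise fall back to a name-only search, and finally record the email as unresolved. Below is a minimal sketch of that chain, assuming a requests-based client and a personal access token; the function name and structure are illustrative only and are not Augur's actual implementation — the two GitHub endpoints are the ones the log shows being called.

```
# Sketch of the fallback chain traced in the log above (illustrative only;
# resolve_login and its structure are assumptions, not Augur's code).
import requests

API = "https://api.github.com"

def resolve_login(owner, repo, sha, name, email, token):
    """Map a git commit author to a GitHub login:
    commit endpoint -> email search -> name-only search -> unresolved (None)."""
    headers = {"Authorization": f"token {token}"}

    # 1. "Trying to create endpoint from commit hash: <sha>"
    commit = requests.get(f"{API}/repos/{owner}/{repo}/commits/{sha}",
                          headers=headers).json()
    author = commit.get("author")  # GitHub user object, or None if unmatched
    if author and author.get("login"):
        return author["login"]

    # 2. "Failed to get login from commit hash" -> search users by email
    resp = requests.get(f"{API}/search/users",
                        params={"q": f"{email} in:email type:user"},
                        headers=headers).json()
    if resp.get("items"):
        return resp["items"][0]["login"]

    # 3. "Could not resolve the username from the email. Trying a name only search..."
    resp = requests.get(f"{API}/search/users",
                        params={"q": f"{name} type:user"},
                        headers=headers).json()
    if resp.get("items"):
        return resp["items"][0]["login"]

    # 4. "Failed to get login from supplemental data!" -> leave unresolved
    return None
```

For example, resolve_login('chaoss', 'augur', '168dad1ba8c36615173c5395c881f3bfe38b328a', 'Eunice Cheng', 'eunicheng@gmail.com', token) would walk the same three steps the log shows failing for that commit, returning None and leaving the email in the unresolved table.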
+2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 17bce17839935e317ef566b6afb8354e081b51ae +2023-01-12 12:26:16 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/17bce17839935e317ef566b6afb8354e081b51ae +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 58383771+Kushal-kothari@users.noreply.github.com +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01037add-9b00-0000-0000-000000000000'), 'cntrb_login': 'Kushal-kothari', 'cntrb_created_at': '2019-12-01T07:15:02Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': '127.0.0.1', 'cntrb_canonical': '58383771+Kushal-kothari@users.noreply.github.com', 'gh_user_id': 58383771, 'gh_login': 'Kushal-kothari', 'gh_url': 'https://api.github.com/users/Kushal-kothari', 'gh_html_url': 'https://github.com/Kushal-kothari', 'gh_node_id': 'MDQ6VXNlcjU4MzgzNzcx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/58383771?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Kushal-kothari/followers', 'gh_following_url': 'https://api.github.com/users/Kushal-kothari/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Kushal-kothari/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Kushal-kothari/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Kushal-kothari/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Kushal-kothari/orgs', 'gh_repos_url': 'https://api.github.com/users/Kushal-kothari/repos', 'gh_events_url': 'https://api.github.com/users/Kushal-kothari/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Kushal-kothari/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-04T19:29:03Z', 'cntrb_full_name': 'Kushal Kothari'} +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO cntrb_id 01037add-9b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 58383771+Kushal-kothari@users.noreply.github.com +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 58383771+Kushal-kothari@users.noreply.github.com +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 184bb6c1655a6243724221af9db8c2d965746aca +2023-01-12 12:26:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/184bb6c1655a6243724221af9db8c2d965746aca +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: manan.goel@research.iiit.ac.in +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101ec42-1900-0000-0000-000000000000'), 'cntrb_login': 'manangoel99', 'cntrb_created_at': '2017-09-25T08:43:03Z', 'cntrb_email': None, 'cntrb_company': '@wandb ', 'cntrb_location': 'Hyderabad, Telangana', 'cntrb_canonical': 'manan.goel@research.iiit.ac.in', 'gh_user_id': 32260633, 'gh_login': 'manangoel99', 'gh_url': 'https://api.github.com/users/manangoel99', 'gh_html_url': 'https://github.com/manangoel99', 'gh_node_id': 'MDQ6VXNlcjMyMjYwNjMz', 
'gh_avatar_url': 'https://avatars.githubusercontent.com/u/32260633?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/manangoel99/followers', 'gh_following_url': 'https://api.github.com/users/manangoel99/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/manangoel99/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/manangoel99/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/manangoel99/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/manangoel99/orgs', 'gh_repos_url': 'https://api.github.com/users/manangoel99/repos', 'gh_events_url': 'https://api.github.com/users/manangoel99/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/manangoel99/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-12T12:47:16Z', 'cntrb_full_name': 'Manan Goel'} +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101ec42-1900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO Creating alias for email: manan.goel@research.iiit.ac.in +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO Updating now resolved email manan.goel@research.iiit.ac.in +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 193654efc1328acdc27dc3cba029ba969e9e6702 +2023-01-12 12:26:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/193654efc1328acdc27dc3cba029ba969e9e6702 +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: shivikapriya730@gmail.com +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0103985c-e700-0000-0000-000000000000'), 'cntrb_login': 'Priya730', 'cntrb_created_at': '2020-01-26T12:51:15Z', 'cntrb_email': 'shivikapriya730@gmail.com', 'cntrb_company': '@CHAOSS', 'cntrb_location': 'India', 'cntrb_canonical': 'shivikapriya730@gmail.com', 'gh_user_id': 60316903, 'gh_login': 'Priya730', 'gh_url': 'https://api.github.com/users/Priya730', 'gh_html_url': 'https://github.com/Priya730', 'gh_node_id': 'MDQ6VXNlcjYwMzE2OTAz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/60316903?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Priya730/followers', 'gh_following_url': 'https://api.github.com/users/Priya730/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Priya730/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Priya730/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Priya730/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Priya730/orgs', 'gh_repos_url': 'https://api.github.com/users/Priya730/repos', 'gh_events_url': 'https://api.github.com/users/Priya730/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Priya730/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-22T13:20:22Z', 'cntrb_full_name': 'Priya Srivastava'} +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 0103985c-e700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO 
Creating alias for email: shivikapriya730@gmail.com +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email shivikapriya730@gmail.com +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1bde56c9e24beab01374382a2929f994f93edf0a +2023-01-12 12:26:19 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1bde56c9e24beab01374382a2929f994f93edf0a +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: ezn526@mail.missouri.edu +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010290ce-4700-0000-0000-000000000000'), 'cntrb_login': 'ElitaNelson', 'cntrb_created_at': '2018-09-06T17:24:22Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'ezn526@mail.missouri.edu', 'gh_user_id': 43044423, 'gh_login': 'ElitaNelson', 'gh_url': 'https://api.github.com/users/ElitaNelson', 'gh_html_url': 'https://github.com/ElitaNelson', 'gh_node_id': 'MDQ6VXNlcjQzMDQ0NDIz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/43044423?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ElitaNelson/followers', 'gh_following_url': 'https://api.github.com/users/ElitaNelson/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ElitaNelson/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ElitaNelson/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ElitaNelson/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ElitaNelson/orgs', 'gh_repos_url': 'https://api.github.com/users/ElitaNelson/repos', 'gh_events_url': 'https://api.github.com/users/ElitaNelson/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ElitaNelson/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-04-06T23:28:16Z', 'cntrb_full_name': 'Elita Nelson'} +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 010290ce-4700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: ezn526@mail.missouri.edu +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO Updating now resolved email ezn526@mail.missouri.edu +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1be4b2328dd9e074933a2909eef8e6ec9066ff39 +2023-01-12 12:26:20 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1be4b2328dd9e074933a2909eef8e6ec9066ff39 +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: agrawalmeet91@gmail.com +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010467ab-1a00-0000-0000-000000000000'), 'cntrb_login': 'meetagrawal09', 'cntrb_created_at': '2020-11-03T17:02:01Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Nagpur, Maharashtra', 'cntrb_canonical': 'agrawalmeet91@gmail.com', 'gh_user_id': 73902874, 'gh_login': 'meetagrawal09', 'gh_url': 
'https://api.github.com/users/meetagrawal09', 'gh_html_url': 'https://github.com/meetagrawal09', 'gh_node_id': 'MDQ6VXNlcjczOTAyODc0', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/73902874?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/meetagrawal09/followers', 'gh_following_url': 'https://api.github.com/users/meetagrawal09/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/meetagrawal09/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/meetagrawal09/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/meetagrawal09/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/meetagrawal09/orgs', 'gh_repos_url': 'https://api.github.com/users/meetagrawal09/repos', 'gh_events_url': 'https://api.github.com/users/meetagrawal09/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/meetagrawal09/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-28T11:59:39Z', 'cntrb_full_name': 'Meet Agrawal'} +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 010467ab-1a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: agrawalmeet91@gmail.com +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email agrawalmeet91@gmail.com +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1c9ed8537d373d1acfd99e07500758ea404fa939 +2023-01-12 12:26:21 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1c9ed8537d373d1acfd99e07500758ea404fa939 +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 68592073+anujlamoria@users.noreply.github.com +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010416a1-c900-0000-0000-000000000000'), 'cntrb_login': 'anujlamoria', 'cntrb_created_at': '2020-07-21T10:30:37Z', 'cntrb_email': None, 'cntrb_company': 'Student', 'cntrb_location': 'Jaipur', 'cntrb_canonical': '68592073+anujlamoria@users.noreply.github.com', 'gh_user_id': 68592073, 'gh_login': 'anujlamoria', 'gh_url': 'https://api.github.com/users/anujlamoria', 'gh_html_url': 'https://github.com/anujlamoria', 'gh_node_id': 'MDQ6VXNlcjY4NTkyMDcz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/68592073?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/anujlamoria/followers', 'gh_following_url': 'https://api.github.com/users/anujlamoria/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/anujlamoria/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/anujlamoria/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/anujlamoria/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/anujlamoria/orgs', 'gh_repos_url': 'https://api.github.com/users/anujlamoria/repos', 'gh_events_url': 'https://api.github.com/users/anujlamoria/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/anujlamoria/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-11-16T11:38:37Z', 'cntrb_full_name': 'Anuj Lamoria'} +2023-01-12 12:26:22 blueberry 
insert_facade_contributors[59440] INFO cntrb_id 010416a1-c900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 68592073+anujlamoria@users.noreply.github.com +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 68592073+anujlamoria@users.noreply.github.com +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1d396e7559386dc8839b757751510c96dd3feac4 +2023-01-12 12:26:22 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1d396e7559386dc8839b757751510c96dd3feac4 +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: manishsaini6421@gmail.com +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01036af8-d400-0000-0000-000000000000'), 'cntrb_login': 'manishsaini6421', 'cntrb_created_at': '2019-11-04T06:26:46Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'manishsaini6421@gmail.com', 'gh_user_id': 57342164, 'gh_login': 'manishsaini6421', 'gh_url': 'https://api.github.com/users/manishsaini6421', 'gh_html_url': 'https://github.com/manishsaini6421', 'gh_node_id': 'MDQ6VXNlcjU3MzQyMTY0', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/57342164?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/manishsaini6421/followers', 'gh_following_url': 'https://api.github.com/users/manishsaini6421/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/manishsaini6421/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/manishsaini6421/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/manishsaini6421/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/manishsaini6421/orgs', 'gh_repos_url': 'https://api.github.com/users/manishsaini6421/repos', 'gh_events_url': 'https://api.github.com/users/manishsaini6421/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/manishsaini6421/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-13T13:16:14Z', 'cntrb_full_name': 'manishsaini6421'} +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO cntrb_id 01036af8-d400-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Creating alias for email: manishsaini6421@gmail.com +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Updating now resolved email manishsaini6421@gmail.com +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1d5eb72267cf00a5330096e49de13e6d595dc83a +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1d5eb72267cf00a5330096e49de13e6d595dc83a +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: sarit.adhikari@gmail.com +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100447d-f500-0000-0000-000000000000'), 
'cntrb_login': 'sarit-adh', 'cntrb_created_at': '2013-05-21T11:30:01Z', 'cntrb_email': 'sarit.adhikari@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'sarit.adhikari@gmail.com', 'gh_user_id': 4488693, 'gh_login': 'sarit-adh', 'gh_url': 'https://api.github.com/users/sarit-adh', 'gh_html_url': 'https://github.com/sarit-adh', 'gh_node_id': 'MDQ6VXNlcjQ0ODg2OTM=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/4488693?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sarit-adh/followers', 'gh_following_url': 'https://api.github.com/users/sarit-adh/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sarit-adh/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sarit-adh/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sarit-adh/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/sarit-adh/orgs', 'gh_repos_url': 'https://api.github.com/users/sarit-adh/repos', 'gh_events_url': 'https://api.github.com/users/sarit-adh/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sarit-adh/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-09-24T20:25:39Z', 'cntrb_full_name': 'Sarit Adhikari'} +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100447d-f500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Creating alias for email: sarit.adhikari@gmail.com +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Updating now resolved email sarit.adhikari@gmail.com +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1dacd011e2f64a1f4f880a87ca83cb990d778252 +2023-01-12 12:26:23 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1dacd011e2f64a1f4f880a87ca83cb990d778252 +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: bikid475@gmail.com +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01044fb0-a800-0000-0000-000000000000'), 'cntrb_login': 'Biki-das', 'cntrb_created_at': '2020-10-04T05:40:40Z', 'cntrb_email': 'bikid475@gmail.com', 'cntrb_company': None, 'cntrb_location': 'Assam,India', 'cntrb_canonical': 'bikid475@gmail.com', 'gh_user_id': 72331432, 'gh_login': 'Biki-das', 'gh_url': 'https://api.github.com/users/Biki-das', 'gh_html_url': 'https://github.com/Biki-das', 'gh_node_id': 'MDQ6VXNlcjcyMzMxNDMy', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/72331432?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Biki-das/followers', 'gh_following_url': 'https://api.github.com/users/Biki-das/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Biki-das/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Biki-das/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Biki-das/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Biki-das/orgs', 'gh_repos_url': 'https://api.github.com/users/Biki-das/repos', 'gh_events_url': 'https://api.github.com/users/Biki-das/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Biki-das/received_events', 'gh_type': 
'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-07T05:13:42Z', 'cntrb_full_name': 'BIKI DAS'} +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO cntrb_id 01044fb0-a800-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO Creating alias for email: bikid475@gmail.com +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO Updating now resolved email bikid475@gmail.com +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1db7b9984a2c75326cca3a330aef9fa20115022a +2023-01-12 12:26:24 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1db7b9984a2c75326cca3a330aef9fa20115022a +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 100847407+VikhyaRK@users.noreply.github.com +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010602cf-2f00-0000-0000-000000000000'), 'cntrb_login': 'VikhyaRK', 'cntrb_created_at': '2022-03-03T12:26:52Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '100847407+VikhyaRK@users.noreply.github.com', 'gh_user_id': 100847407, 'gh_login': 'VikhyaRK', 'gh_url': 'https://api.github.com/users/VikhyaRK', 'gh_html_url': 'https://github.com/VikhyaRK', 'gh_node_id': 'U_kgDOBgLPLw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/100847407?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/VikhyaRK/followers', 'gh_following_url': 'https://api.github.com/users/VikhyaRK/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/VikhyaRK/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/VikhyaRK/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/VikhyaRK/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/VikhyaRK/orgs', 'gh_repos_url': 'https://api.github.com/users/VikhyaRK/repos', 'gh_events_url': 'https://api.github.com/users/VikhyaRK/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/VikhyaRK/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-04-08T14:03:56Z', 'cntrb_full_name': 'VikhyaRK'} +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO cntrb_id 010602cf-2f00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 100847407+VikhyaRK@users.noreply.github.com +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 100847407+VikhyaRK@users.noreply.github.com +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1de1d8be3c035886a584afcdb9bb47bd8679ab64 +2023-01-12 12:26:25 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1de1d8be3c035886a584afcdb9bb47bd8679ab64 +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Ulincsys', 'hash': 
'1de1d8be3c035886a584afcdb9bb47bd8679ab64', 'email_raw': '', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Email less than two characters +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 1f795e9a8a8ae0493e329a9cc522a0e625ccc2f5 +2023-01-12 12:26:26 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/1f795e9a8a8ae0493e329a9cc522a0e625ccc2f5 +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: hacksmath@gmail.com +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100e3cd-6300-0000-0000-000000000000'), 'cntrb_login': 'micahswab', 'cntrb_created_at': '2015-10-01T17:48:21Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'hacksmath@gmail.com', 'gh_user_id': 14929251, 'gh_login': 'micahswab', 'gh_url': 'https://api.github.com/users/micahswab', 'gh_html_url': 'https://github.com/micahswab', 'gh_node_id': 'MDQ6VXNlcjE0OTI5MjUx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/14929251?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/micahswab/followers', 'gh_following_url': 'https://api.github.com/users/micahswab/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/micahswab/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/micahswab/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/micahswab/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/micahswab/orgs', 'gh_repos_url': 'https://api.github.com/users/micahswab/repos', 'gh_events_url': 'https://api.github.com/users/micahswab/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/micahswab/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-22T16:58:33Z', 'cntrb_full_name': 'Micah Swab'} +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100e3cd-6300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO Creating alias for email: hacksmath@gmail.com +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO Updating now resolved email hacksmath@gmail.com +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2021f4c62af8e2f3b2f5b42a0d8fd2b4a61dce77 +2023-01-12 12:26:27 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2021f4c62af8e2f3b2f5b42a0d8fd2b4a61dce77 +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 54278969+FalanaTolu@users.noreply.github.com +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01033c3b-3900-0000-0000-000000000000'), 
'cntrb_login': 'FalanaTolu', 'cntrb_created_at': '2019-08-19T14:34:44Z', 'cntrb_email': 'falanaft@gmail.com', 'cntrb_company': None, 'cntrb_location': 'Lagos, Nigeria', 'cntrb_canonical': 'falanaft@gmail.com', 'gh_user_id': 54278969, 'gh_login': 'FalanaTolu', 'gh_url': 'https://api.github.com/users/FalanaTolu', 'gh_html_url': 'https://github.com/FalanaTolu', 'gh_node_id': 'MDQ6VXNlcjU0Mjc4OTY5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/54278969?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/FalanaTolu/followers', 'gh_following_url': 'https://api.github.com/users/FalanaTolu/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/FalanaTolu/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/FalanaTolu/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/FalanaTolu/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/FalanaTolu/orgs', 'gh_repos_url': 'https://api.github.com/users/FalanaTolu/repos', 'gh_events_url': 'https://api.github.com/users/FalanaTolu/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/FalanaTolu/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-10T22:22:44Z', 'cntrb_full_name': 'Falana Tolulope'} +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO cntrb_id 01033c3b-3900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 54278969+FalanaTolu@users.noreply.github.com +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 54278969+FalanaTolu@users.noreply.github.com +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2084bf0497bd5f3282a19672cb458fd4b4833b4e +2023-01-12 12:26:28 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2084bf0497bd5f3282a19672cb458fd4b4833b4e +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 81832275+jiisa-k@users.noreply.github.com +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0104e0a9-5300-0000-0000-000000000000'), 'cntrb_login': 'jiisa-k', 'cntrb_created_at': '2021-04-02T21:14:49Z', 'cntrb_email': None, 'cntrb_company': 'Indian Institute of Technology, Roorkee', 'cntrb_location': 'Roorkee', 'cntrb_canonical': '81832275+jiisa-k@users.noreply.github.com', 'gh_user_id': 81832275, 'gh_login': 'jiisa-k', 'gh_url': 'https://api.github.com/users/jiisa-k', 'gh_html_url': 'https://github.com/jiisa-k', 'gh_node_id': 'MDQ6VXNlcjgxODMyMjc1', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/81832275?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jiisa-k/followers', 'gh_following_url': 'https://api.github.com/users/jiisa-k/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jiisa-k/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jiisa-k/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jiisa-k/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jiisa-k/orgs', 'gh_repos_url': 'https://api.github.com/users/jiisa-k/repos', 'gh_events_url': 
'https://api.github.com/users/jiisa-k/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jiisa-k/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T12:51:07Z', 'cntrb_full_name': 'Jigyasa Kumari'} +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO cntrb_id 0104e0a9-5300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 81832275+jiisa-k@users.noreply.github.com +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 81832275+jiisa-k@users.noreply.github.com +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2251cdba2f8155d31311696e11650f4e430db835 +2023-01-12 12:26:29 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2251cdba2f8155d31311696e11650f4e430db835 +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 28362836a@gmail.com +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100b728-7300-0000-0000-000000000000'), 'cntrb_login': 'Ulincsys', 'cntrb_created_at': '2015-04-18T04:55:05Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'United States', 'cntrb_canonical': '28362836a@gmail.com', 'gh_user_id': 12003443, 'gh_login': 'Ulincsys', 'gh_url': 'https://api.github.com/users/Ulincsys', 'gh_html_url': 'https://github.com/Ulincsys', 'gh_node_id': 'MDQ6VXNlcjEyMDAzNDQz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/12003443?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Ulincsys/followers', 'gh_following_url': 'https://api.github.com/users/Ulincsys/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Ulincsys/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Ulincsys/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Ulincsys/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Ulincsys/orgs', 'gh_repos_url': 'https://api.github.com/users/Ulincsys/repos', 'gh_events_url': 'https://api.github.com/users/Ulincsys/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Ulincsys/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-11T16:25:31Z', 'cntrb_full_name': 'John Kieran'} +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100b728-7300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 28362836a@gmail.com +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 28362836a@gmail.com +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 225c6499bb975bae63c8e0fcbabb01213333da0b +2023-01-12 12:26:30 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/225c6499bb975bae63c8e0fcbabb01213333da0b +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from 
github for email: jacobeharding@gmail.com +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010005d2-4300-0000-0000-000000000000'), 'cntrb_login': 'jakeharding', 'cntrb_created_at': '2010-08-31T02:55:27Z', 'cntrb_email': 'jacobeharding@gmail.com', 'cntrb_company': 'Upstart', 'cntrb_location': 'Omaha, Nebraska', 'cntrb_canonical': 'jacobeharding@gmail.com', 'gh_user_id': 381507, 'gh_login': 'jakeharding', 'gh_url': 'https://api.github.com/users/jakeharding', 'gh_html_url': 'https://github.com/jakeharding', 'gh_node_id': 'MDQ6VXNlcjM4MTUwNw==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/381507?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jakeharding/followers', 'gh_following_url': 'https://api.github.com/users/jakeharding/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jakeharding/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jakeharding/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jakeharding/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jakeharding/orgs', 'gh_repos_url': 'https://api.github.com/users/jakeharding/repos', 'gh_events_url': 'https://api.github.com/users/jakeharding/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jakeharding/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-12T15:28:14Z', 'cntrb_full_name': 'Jake Harding'} +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO cntrb_id 010005d2-4300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jacobeharding@gmail.com +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jacobeharding@gmail.com +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 25fd09a1539141eedd64c7d778809b259a49cbbe +2023-01-12 12:26:31 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/25fd09a1539141eedd64c7d778809b259a49cbbe +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: lylynaheng@gmail.com +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01007b5e-cc00-0000-0000-000000000000'), 'cntrb_login': 'lylynaheng', 'cntrb_created_at': '2014-07-07T02:41:13Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Bay Area, CA', 'cntrb_canonical': 'lylynaheng@gmail.com', 'gh_user_id': 8085196, 'gh_login': 'lylynaheng', 'gh_url': 'https://api.github.com/users/lylynaheng', 'gh_html_url': 'https://github.com/lylynaheng', 'gh_node_id': 'MDQ6VXNlcjgwODUxOTY=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8085196?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/lylynaheng/followers', 'gh_following_url': 'https://api.github.com/users/lylynaheng/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/lylynaheng/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/lylynaheng/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/lylynaheng/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/lylynaheng/orgs', 'gh_repos_url': 
'https://api.github.com/users/lylynaheng/repos', 'gh_events_url': 'https://api.github.com/users/lylynaheng/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/lylynaheng/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-24T01:04:13Z', 'cntrb_full_name': 'Lylyna Heng '} +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO cntrb_id 01007b5e-cc00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO Creating alias for email: lylynaheng@gmail.com +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO Updating now resolved email lylynaheng@gmail.com +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2787d361db8fca5b3fd44523e18dc9aac6557f10 +2023-01-12 12:26:32 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2787d361db8fca5b3fd44523e18dc9aac6557f10 +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 27910629+b-kamangara@users.noreply.github.com +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101a9e1-e500-0000-0000-000000000000'), 'cntrb_login': 'b-kamangara', 'cntrb_created_at': '2017-04-23T05:48:07Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': '27910629+b-kamangara@users.noreply.github.com', 'gh_user_id': 27910629, 'gh_login': 'b-kamangara', 'gh_url': 'https://api.github.com/users/b-kamangara', 'gh_html_url': 'https://github.com/b-kamangara', 'gh_node_id': 'MDQ6VXNlcjI3OTEwNjI5', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/27910629?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/b-kamangara/followers', 'gh_following_url': 'https://api.github.com/users/b-kamangara/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/b-kamangara/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/b-kamangara/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/b-kamangara/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/b-kamangara/orgs', 'gh_repos_url': 'https://api.github.com/users/b-kamangara/repos', 'gh_events_url': 'https://api.github.com/users/b-kamangara/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/b-kamangara/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-08-04T16:29:38Z', 'cntrb_full_name': 'Brenda Kamangara'} +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101a9e1-e500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Creating alias for email: 27910629+b-kamangara@users.noreply.github.com +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Updating now resolved email 27910629+b-kamangara@users.noreply.github.com +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2797cfa9cdc5fc82e6e02d1fd678ed132252157c +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Url: 
https://api.github.com/repos/chaoss/augur/commits/2797cfa9cdc5fc82e6e02d1fd678ed132252157c +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Delight362', 'hash': '2797cfa9cdc5fc82e6e02d1fd678ed132252157c', 'email_raw': 'oprauredu@gmail.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=oprauredu@gmail.com+in:email+type:user +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from oprauredu@gmail.com +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'oprauredu@gmail.com', 'name': 'Delight362'} +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 28ac8751b1dd883b1f280b9f52697df3192d9e1b +2023-01-12 12:26:33 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/28ac8751b1dd883b1f280b9f52697df3192d9e1b +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: iyovcheva@vmware.com +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01003f7a-8500-0000-0000-000000000000'), 'cntrb_login': 'ivanayov', 'cntrb_created_at': '2013-04-15T11:38:21Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'iyovcheva@vmware.com', 'gh_user_id': 4160133, 'gh_login': 'ivanayov', 'gh_url': 'https://api.github.com/users/ivanayov', 'gh_html_url': 'https://github.com/ivanayov', 'gh_node_id': 'MDQ6VXNlcjQxNjAxMzM=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/4160133?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ivanayov/followers', 'gh_following_url': 'https://api.github.com/users/ivanayov/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ivanayov/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ivanayov/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ivanayov/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ivanayov/orgs', 'gh_repos_url': 'https://api.github.com/users/ivanayov/repos', 'gh_events_url': 'https://api.github.com/users/ivanayov/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ivanayov/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-06T14:27:57Z', 'cntrb_full_name': 'Ivana Atanasova'} +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO cntrb_id 01003f7a-8500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO Creating alias for email: iyovcheva@vmware.com +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO 
Updating now resolved email iyovcheva@vmware.com +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2ac5a064a0a79f5585545f84a8001806ae56311c +2023-01-12 12:26:34 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2ac5a064a0a79f5585545f84a8001806ae56311c +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Ubuntu', 'hash': '2ac5a064a0a79f5585545f84a8001806ae56311c', 'email_raw': 'ubuntu@ip-172-31-35-22.ec2.internal', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=ubuntu@ip-172-31-35-22.ec2.internal+in:email+type:user +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from ubuntu@ip-172-31-35-22.ec2.internal +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'ubuntu@ip-172-31-35-22.ec2.internal', 'name': 'Ubuntu'} +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:26:35 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: iyovcheva@iyovcheva-a02.vmware.com +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01003f7a-8500-0000-0000-000000000000'), 'cntrb_login': 'ivanayov', 'cntrb_created_at': '2013-04-15T11:38:21Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'iyovcheva@iyovcheva-a02.vmware.com', 'gh_user_id': 4160133, 'gh_login': 'ivanayov', 'gh_url': 'https://api.github.com/users/ivanayov', 'gh_html_url': 'https://github.com/ivanayov', 'gh_node_id': 'MDQ6VXNlcjQxNjAxMzM=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/4160133?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/ivanayov/followers', 'gh_following_url': 'https://api.github.com/users/ivanayov/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/ivanayov/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/ivanayov/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/ivanayov/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/ivanayov/orgs', 'gh_repos_url': 'https://api.github.com/users/ivanayov/repos', 'gh_events_url': 'https://api.github.com/users/ivanayov/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/ivanayov/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-06T14:27:57Z', 'cntrb_full_name': 'Ivana Atanasova'} +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO cntrb_id 01003f7a-8500-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Creating alias for email: iyovcheva@iyovcheva-a02.vmware.com +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 
12:26:36 blueberry insert_facade_contributors[59440] INFO Updating now resolved email iyovcheva@iyovcheva-a02.vmware.com +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2dbaee91c25560910143bb32fc9727ac60bdc441 +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2dbaee91c25560910143bb32fc9727ac60bdc441 +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: will@mlh.io +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102083f-4900-0000-0000-000000000000'), 'cntrb_login': 'wrussell1999', 'cntrb_created_at': '2017-11-29T11:50:08Z', 'cntrb_email': 'will@wrussell.co.uk', 'cntrb_company': '@MLH-Fellowship', 'cntrb_location': 'London, UK', 'cntrb_canonical': 'will@wrussell.co.uk', 'gh_user_id': 34094921, 'gh_login': 'wrussell1999', 'gh_url': 'https://api.github.com/users/wrussell1999', 'gh_html_url': 'https://github.com/wrussell1999', 'gh_node_id': 'MDQ6VXNlcjM0MDk0OTIx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/34094921?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/wrussell1999/followers', 'gh_following_url': 'https://api.github.com/users/wrussell1999/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/wrussell1999/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/wrussell1999/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/wrussell1999/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/wrussell1999/orgs', 'gh_repos_url': 'https://api.github.com/users/wrussell1999/repos', 'gh_events_url': 'https://api.github.com/users/wrussell1999/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/wrussell1999/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-14T15:02:40Z', 'cntrb_full_name': 'Will Russell'} +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102083f-4900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Creating alias for email: will@mlh.io +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Updating now resolved email will@mlh.io +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2f382419f7a2f0b198ff83d65ff409545239f8c1 +2023-01-12 12:26:36 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2f382419f7a2f0b198ff83d65ff409545239f8c1 +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: hashimchaudry23@gmail.com +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010380f7-ab00-0000-0000-000000000000'), 'cntrb_login': 'mHash1m', 'cntrb_created_at': '2019-12-11T16:27:48Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Pakistan', 'cntrb_canonical': 'hashimchaudry23@gmail.com', 'gh_user_id': 58783659, 'gh_login': 'mHash1m', 'gh_url': 'https://api.github.com/users/mHash1m', 'gh_html_url': 'https://github.com/mHash1m', 'gh_node_id': 'MDQ6VXNlcjU4NzgzNjU5', 'gh_avatar_url': 
'https://avatars.githubusercontent.com/u/58783659?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/mHash1m/followers', 'gh_following_url': 'https://api.github.com/users/mHash1m/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/mHash1m/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/mHash1m/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/mHash1m/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/mHash1m/orgs', 'gh_repos_url': 'https://api.github.com/users/mHash1m/repos', 'gh_events_url': 'https://api.github.com/users/mHash1m/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/mHash1m/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-24T10:08:28Z', 'cntrb_full_name': 'Hashim'} +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO cntrb_id 010380f7-ab00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO Creating alias for email: hashimchaudry23@gmail.com +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO Updating now resolved email hashimchaudry23@gmail.com +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 2fddc04eaf98c12387876607005689368125faba +2023-01-12 12:26:37 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/2fddc04eaf98c12387876607005689368125faba +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: nynalaalekhya@gmail.com +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102fc61-5400-0000-0000-000000000000'), 'cntrb_login': 'nynaalekhya', 'cntrb_created_at': '2019-04-29T05:23:49Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'San Jose, CA', 'cntrb_canonical': 'nynalaalekhya@gmail.com', 'gh_user_id': 50094420, 'gh_login': 'nynaalekhya', 'gh_url': 'https://api.github.com/users/nynaalekhya', 'gh_html_url': 'https://github.com/nynaalekhya', 'gh_node_id': 'MDQ6VXNlcjUwMDk0NDIw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/50094420?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/nynaalekhya/followers', 'gh_following_url': 'https://api.github.com/users/nynaalekhya/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/nynaalekhya/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/nynaalekhya/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/nynaalekhya/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/nynaalekhya/orgs', 'gh_repos_url': 'https://api.github.com/users/nynaalekhya/repos', 'gh_events_url': 'https://api.github.com/users/nynaalekhya/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/nynaalekhya/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-04-14T17:15:22Z', 'cntrb_full_name': 'nynalaalekhya'} +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102fc61-5400-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO Creating alias for email: nynalaalekhya@gmail.com 
+2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO Updating now resolved email nynalaalekhya@gmail.com +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 35064908066c9d48f8dd7e7a287b3d60959071d7 +2023-01-12 12:26:38 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/35064908066c9d48f8dd7e7a287b3d60959071d7 +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Spencer Robinson', 'hash': '35064908066c9d48f8dd7e7a287b3d60959071d7', 'email_raw': 'spencerrrobinson@unomaha.edu', 'resolution_status': 'not_unresolved'} +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=spencerrrobinson@unomaha.edu+in:email+type:user +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from spencerrrobinson@unomaha.edu +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'spencerrrobinson@unomaha.edu', 'name': 'Spencer Robinson'} +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:26:39 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'TheRedBerrys', 'id': 1714960, 'node_id': 'MDQ6VXNlcjE3MTQ5NjA=', 'avatar_url': 'https://avatars.githubusercontent.com/u/1714960?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/TheRedBerrys', 'html_url': 'https://github.com/TheRedBerrys', 'followers_url': 'https://api.github.com/users/TheRedBerrys/followers', 'following_url': 'https://api.github.com/users/TheRedBerrys/following{/other_user}', 'gists_url': 'https://api.github.com/users/TheRedBerrys/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/TheRedBerrys/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/TheRedBerrys/subscriptions', 'organizations_url': 'https://api.github.com/users/TheRedBerrys/orgs', 'repos_url': 'https://api.github.com/users/TheRedBerrys/repos', 'events_url': 'https://api.github.com/users/TheRedBerrys/events{/privacy}', 'received_events_url': 'https://api.github.com/users/TheRedBerrys/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: spencerrrobinson@unomaha.edu +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01001a2b-1000-0000-0000-000000000000'), 'cntrb_login': 'TheRedBerrys', 'cntrb_created_at': '2012-05-07T22:25:20Z', 'cntrb_email': 'theredberrys@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'theredberrys@gmail.com', 'gh_user_id': 1714960, 'gh_login': 'TheRedBerrys', 'gh_url': 'https://api.github.com/users/TheRedBerrys', 'gh_html_url': 'https://github.com/TheRedBerrys', 'gh_node_id': 'MDQ6VXNlcjE3MTQ5NjA=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/1714960?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/TheRedBerrys/followers', 'gh_following_url': 
'https://api.github.com/users/TheRedBerrys/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/TheRedBerrys/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/TheRedBerrys/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/TheRedBerrys/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/TheRedBerrys/orgs', 'gh_repos_url': 'https://api.github.com/users/TheRedBerrys/repos', 'gh_events_url': 'https://api.github.com/users/TheRedBerrys/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/TheRedBerrys/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-20T16:10:26Z', 'cntrb_full_name': 'Spencer Robinson'} +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO cntrb_id 01001a2b-1000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO Creating alias for email: spencerrrobinson@unomaha.edu +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO Updating now resolved email spencerrrobinson@unomaha.edu +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 35f77493bf741affe7644641c5565658602a6c54 +2023-01-12 12:26:40 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/35f77493bf741affe7644641c5565658602a6c54 +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: btech10079.19@bitmesra.ac.in +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01043684-7b00-0000-0000-000000000000'), 'cntrb_login': 'Quickbeasts51429', 'cntrb_created_at': '2020-09-03T04:49:23Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'btech10079.19@bitmesra.ac.in', 'gh_user_id': 70681723, 'gh_login': 'Quickbeasts51429', 'gh_url': 'https://api.github.com/users/Quickbeasts51429', 'gh_html_url': 'https://github.com/Quickbeasts51429', 'gh_node_id': 'MDQ6VXNlcjcwNjgxNzIz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/70681723?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Quickbeasts51429/followers', 'gh_following_url': 'https://api.github.com/users/Quickbeasts51429/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Quickbeasts51429/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Quickbeasts51429/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Quickbeasts51429/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Quickbeasts51429/orgs', 'gh_repos_url': 'https://api.github.com/users/Quickbeasts51429/repos', 'gh_events_url': 'https://api.github.com/users/Quickbeasts51429/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Quickbeasts51429/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-16T13:27:12Z', 'cntrb_full_name': 'SOHINI SHAH'} +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO cntrb_id 01043684-7b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Creating alias for email: btech10079.19@bitmesra.ac.in +2023-01-12 12:26:41 blueberry 
insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Updating now resolved email btech10079.19@bitmesra.ac.in +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 3a5b24720610404c6c2908398d6ea73063eefa9f +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/3a5b24720610404c6c2908398d6ea73063eefa9f +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: suleemmanuella@yahoo.com +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0103dfb4-d700-0000-0000-000000000000'), 'cntrb_login': 'Nene-S', 'cntrb_created_at': '2020-05-07T22:43:03Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'suleemmanuella@yahoo.com', 'gh_user_id': 64992471, 'gh_login': 'Nene-S', 'gh_url': 'https://api.github.com/users/Nene-S', 'gh_html_url': 'https://github.com/Nene-S', 'gh_node_id': 'MDQ6VXNlcjY0OTkyNDcx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/64992471?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Nene-S/followers', 'gh_following_url': 'https://api.github.com/users/Nene-S/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Nene-S/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Nene-S/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Nene-S/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Nene-S/orgs', 'gh_repos_url': 'https://api.github.com/users/Nene-S/repos', 'gh_events_url': 'https://api.github.com/users/Nene-S/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Nene-S/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-08T04:39:23Z', 'cntrb_full_name': 'Emmanuella Sule'} +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO cntrb_id 0103dfb4-d700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Creating alias for email: suleemmanuella@yahoo.com +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:26:41 blueberry insert_facade_contributors[59440] INFO Updating now resolved email suleemmanuella@yahoo.com +2023-01-12 12:26:42 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 3c4c0e4fc54f0199d64562ed4b7ce3261c0ce000 +2023-01-12 12:26:42 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/3c4c0e4fc54f0199d64562ed4b7ce3261c0ce000 +2023-01-12 12:26:42 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: 45593458+rochisha0@users.noreply.github.com +2023-01-12 12:26:42 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102b7b3-7200-0000-0000-000000000000'), 'cntrb_login': 'rochisha0', 'cntrb_created_at': '2018-12-04T10:54:24Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Indian Institute of Technology, Roorkee', 'cntrb_canonical': '45593458+rochisha0@users.noreply.github.com', 'gh_user_id': 45593458, 'gh_login': 'rochisha0', 'gh_url': 'https://api.github.com/users/rochisha0', 'gh_html_url': 'https://github.com/rochisha0', 'gh_node_id': 'MDQ6VXNlcjQ1NTkzNDU4', 
'gh_avatar_url': 'https://avatars.githubusercontent.com/u/45593458?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/rochisha0/followers', 'gh_following_url': 'https://api.github.com/users/rochisha0/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/rochisha0/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/rochisha0/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/rochisha0/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/rochisha0/orgs', 'gh_repos_url': 'https://api.github.com/users/rochisha0/repos', 'gh_events_url': 'https://api.github.com/users/rochisha0/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/rochisha0/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-15T16:17:46Z', 'cntrb_full_name': 'Rochisha Agarwal'}
[... a long run of similar insert_facade_contributors log entries (2023-01-12 12:26:42 through 12:27:07, host blueberry) is elided here. Each block follows the same pattern: resolve a commit author's email against the GitHub API for chaoss/augur via the commits endpoint for the commit hash, fall back to an email search and then a name-only search if needed, assign the matching cntrb_id to the enriched data, create an alias for the email, and mark it resolved. The elided entries cover the logins kaxada, abhinavbajpai2012, Shivani-Parihar99, bparish628, IsaacMilarky, geekygirldawn, suuus, kunlefash, Dhruv-Sachdev1313, mscherer, witchtrish, jacoblagesse, petermart, HarrietAkot, Ayushdubey86, yashtikajigs, woodruff, mbbroberg, Nasmasim, vinodkahuja, blaze-fire, a-hodges, ChristianCme, gordongli, oma131, and Yogita98. Emails that the email search could not resolve were shivaniparihar1502@gmail.com and isaac@isaac-suse.dhcp.missouri.edu (both recovered by the name-only fallback) and guddan@W106JNC2X2.blr.apac.dell.com, which ends with "ERROR Failed to get login from supplemental data!" ...]
+2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: a7a069ddb43303e6e25574fcddf1d85886c63680
+2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/a7a069ddb43303e6e25574fcddf1d85886c63680
+2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: qianqianshan.am@gmail.com
+2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100dac7-c700-0000-0000-000000000000'), 'cntrb_login': 'QianqianShan', 'cntrb_created_at': '2015-09-17T21:29:09Z', 'cntrb_email': 'qianqianshan.am@gmail.com', 'cntrb_company': 'Amazon.com Inc', 'cntrb_location': 'Seattle, WA, USA', 'cntrb_canonical': 'qianqianshan.am@gmail.com', 'gh_user_id': 14337991, 'gh_login': 'QianqianShan', 'gh_url': 'https://api.github.com/users/QianqianShan', 'gh_html_url': 'https://github.com/QianqianShan', 'gh_node_id': 'MDQ6VXNlcjE0MzM3OTkx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/14337991?v=4', 'gh_gravatar_id': '', 
'gh_followers_url': 'https://api.github.com/users/QianqianShan/followers', 'gh_following_url': 'https://api.github.com/users/QianqianShan/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/QianqianShan/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/QianqianShan/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/QianqianShan/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/QianqianShan/orgs', 'gh_repos_url': 'https://api.github.com/users/QianqianShan/repos', 'gh_events_url': 'https://api.github.com/users/QianqianShan/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/QianqianShan/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-07-12T17:48:33Z', 'cntrb_full_name': 'Qianqian Shan'} +2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100dac7-c700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Creating alias for email: qianqianshan.am@gmail.com +2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Updating now resolved email qianqianshan.am@gmail.com +2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: aa107bb387334543b691991482ddca8c0b97ac89 +2023-01-12 12:27:07 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/aa107bb387334543b691991482ddca8c0b97ac89 +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: yokwejuste@yahoo.com +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0104493b-dc00-0000-0000-000000000000'), 'cntrb_login': 'yokwejuste', 'cntrb_created_at': '2020-09-25T23:13:58Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Bamenda, Cameroon', 'cntrb_canonical': 'yokwejuste@yahoo.com', 'gh_user_id': 71908316, 'gh_login': 'yokwejuste', 'gh_url': 'https://api.github.com/users/yokwejuste', 'gh_html_url': 'https://github.com/yokwejuste', 'gh_node_id': 'MDQ6VXNlcjcxOTA4MzE2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/71908316?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/yokwejuste/followers', 'gh_following_url': 'https://api.github.com/users/yokwejuste/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/yokwejuste/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/yokwejuste/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/yokwejuste/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/yokwejuste/orgs', 'gh_repos_url': 'https://api.github.com/users/yokwejuste/repos', 'gh_events_url': 'https://api.github.com/users/yokwejuste/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/yokwejuste/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-06T12:46:10Z', 'cntrb_full_name': 'Steve Yonkeu'} +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO cntrb_id 0104493b-dc00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Creating alias for email: yokwejuste@yahoo.com +2023-01-12 12:27:09 blueberry 
insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Updating now resolved email yokwejuste@yahoo.com +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: michaelwoodruff@mwc-021085.dhcp.missouri.edu +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01008337-7a00-0000-0000-000000000000'), 'cntrb_login': 'woodruff', 'cntrb_created_at': '2014-08-30T15:05:47Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'United States', 'cntrb_canonical': 'michaelwoodruff@mwc-021085.dhcp.missouri.edu', 'gh_user_id': 8599418, 'gh_login': 'woodruff', 'gh_url': 'https://api.github.com/users/woodruff', 'gh_html_url': 'https://github.com/woodruff', 'gh_node_id': 'MDQ6VXNlcjg1OTk0MTg=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8599418?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/woodruff/followers', 'gh_following_url': 'https://api.github.com/users/woodruff/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/woodruff/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/woodruff/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/woodruff/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/woodruff/orgs', 'gh_repos_url': 'https://api.github.com/users/woodruff/repos', 'gh_events_url': 'https://api.github.com/users/woodruff/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/woodruff/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2016-02-27T18:47:24Z', 'cntrb_full_name': 'Michael Woodruff'} +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO cntrb_id 01008337-7a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Creating alias for email: michaelwoodruff@mwc-021085.dhcp.missouri.edu +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Updating now resolved email michaelwoodruff@mwc-021085.dhcp.missouri.edu +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: b12a8628d6ae689f4317221cbc293e9eb7faeb8d +2023-01-12 12:27:09 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/b12a8628d6ae689f4317221cbc293e9eb7faeb8d +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: josh@agliodbs.com +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010001c1-ca00-0000-0000-000000000000'), 'cntrb_login': 'jberkus', 'cntrb_created_at': '2009-08-14T00:14:40Z', 'cntrb_email': 'jberkus@redhat.com', 'cntrb_company': 'Red Hat', 'cntrb_location': 'Portland, OR', 'cntrb_canonical': 'jberkus@redhat.com', 'gh_user_id': 115146, 'gh_login': 'jberkus', 'gh_url': 'https://api.github.com/users/jberkus', 'gh_html_url': 'https://github.com/jberkus', 'gh_node_id': 'MDQ6VXNlcjExNTE0Ng==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/115146?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jberkus/followers', 'gh_following_url': 'https://api.github.com/users/jberkus/following{/other_user}', 'gh_gists_url': 
'https://api.github.com/users/jberkus/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jberkus/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jberkus/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jberkus/orgs', 'gh_repos_url': 'https://api.github.com/users/jberkus/repos', 'gh_events_url': 'https://api.github.com/users/jberkus/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jberkus/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-11-01T19:13:19Z', 'cntrb_full_name': 'Josh Berkus'} +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO cntrb_id 010001c1-ca00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO Creating alias for email: josh@agliodbs.com +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO Updating now resolved email josh@agliodbs.com +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: b2d94c9c65754306a7481475e8474a979fcbe1ee +2023-01-12 12:27:10 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/b2d94c9c65754306a7481475e8474a979fcbe1ee +2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'Sarah Salah', 'hash': 'b2d94c9c65754306a7481475e8474a979fcbe1ee', 'email_raw': 'sarah.salah.rizk@gmail.com', 'resolution_status': 'not_unresolved'} +2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=sarah.salah.rizk@gmail.com+in:email+type:user +2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from sarah.salah.rizk@gmail.com +2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'sarah.salah.rizk@gmail.com', 'name': 'Sarah Salah'} +2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... 
+2023-01-12 12:27:11 blueberry insert_facade_contributors[59440] INFO When searching for a contributor, we found the following users: {'login': 'Mag8sara', 'id': 42137627, 'node_id': 'MDQ6VXNlcjQyMTM3NjI3', 'avatar_url': 'https://avatars.githubusercontent.com/u/42137627?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/Mag8sara', 'html_url': 'https://github.com/Mag8sara', 'followers_url': 'https://api.github.com/users/Mag8sara/followers', 'following_url': 'https://api.github.com/users/Mag8sara/following{/other_user}', 'gists_url': 'https://api.github.com/users/Mag8sara/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/Mag8sara/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/Mag8sara/subscriptions', 'organizations_url': 'https://api.github.com/users/Mag8sara/orgs', 'repos_url': 'https://api.github.com/users/Mag8sara/repos', 'events_url': 'https://api.github.com/users/Mag8sara/events{/privacy}', 'received_events_url': 'https://api.github.com/users/Mag8sara/received_events', 'type': 'User', 'site_admin': False, 'score': 1.0} +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: sarah.salah.rizk@gmail.com +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010282f8-1b00-0000-0000-000000000000'), 'cntrb_login': 'Mag8sara', 'cntrb_created_at': '2018-08-06T09:08:58Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'Saudi Arabia ', 'cntrb_canonical': 'sarah.salah.rizk@gmail.com', 'gh_user_id': 42137627, 'gh_login': 'Mag8sara', 'gh_url': 'https://api.github.com/users/Mag8sara', 'gh_html_url': 'https://github.com/Mag8sara', 'gh_node_id': 'MDQ6VXNlcjQyMTM3NjI3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/42137627?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/Mag8sara/followers', 'gh_following_url': 'https://api.github.com/users/Mag8sara/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/Mag8sara/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/Mag8sara/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/Mag8sara/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/Mag8sara/orgs', 'gh_repos_url': 'https://api.github.com/users/Mag8sara/repos', 'gh_events_url': 'https://api.github.com/users/Mag8sara/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/Mag8sara/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-07-20T11:59:06Z', 'cntrb_full_name': 'Sarah Salah'} +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO cntrb_id 010282f8-1b00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO Creating alias for email: sarah.salah.rizk@gmail.com +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO Updating now resolved email sarah.salah.rizk@gmail.com +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: b464c6a0beb958e9a2815e3977a94901580b5f6a +2023-01-12 12:27:12 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/b464c6a0beb958e9a2815e3977a94901580b5f6a +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO Successfully retrieved 
data from github for email: gsardana001@gmail.com +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0101e430-7300-0000-0000-000000000000'), 'cntrb_login': 'GouravSardana', 'cntrb_created_at': '2017-09-07T11:58:55Z', 'cntrb_email': 'gsardana001@gmail.com', 'cntrb_company': 'Indian Open Source Foundation', 'cntrb_location': 'Bangalore', 'cntrb_canonical': 'gsardana001@gmail.com', 'gh_user_id': 31731827, 'gh_login': 'GouravSardana', 'gh_url': 'https://api.github.com/users/GouravSardana', 'gh_html_url': 'https://github.com/GouravSardana', 'gh_node_id': 'MDQ6VXNlcjMxNzMxODI3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/31731827?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/GouravSardana/followers', 'gh_following_url': 'https://api.github.com/users/GouravSardana/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/GouravSardana/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/GouravSardana/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/GouravSardana/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/GouravSardana/orgs', 'gh_repos_url': 'https://api.github.com/users/GouravSardana/repos', 'gh_events_url': 'https://api.github.com/users/GouravSardana/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/GouravSardana/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-09-24T13:48:28Z', 'cntrb_full_name': 'Gourav Sardana'} +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO cntrb_id 0101e430-7300-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO Creating alias for email: gsardana001@gmail.com +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO Updating now resolved email gsardana001@gmail.com +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: b56ef7c2175cc3d170aa80c5e31ec115448f1569 +2023-01-12 12:27:13 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/b56ef7c2175cc3d170aa80c5e31ec115448f1569 +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: aleksandrosansan@gmail.com +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010590d1-3200-0000-0000-000000000000'), 'cntrb_login': 'sashashura', 'cntrb_created_at': '2021-10-29T09:18:53Z', 'cntrb_email': 'aleksandrosansan@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'aleksandrosansan@gmail.com', 'gh_user_id': 93376818, 'gh_login': 'sashashura', 'gh_url': 'https://api.github.com/users/sashashura', 'gh_html_url': 'https://github.com/sashashura', 'gh_node_id': 'U_kgDOBZDRMg', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/93376818?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/sashashura/followers', 'gh_following_url': 'https://api.github.com/users/sashashura/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/sashashura/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/sashashura/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/sashashura/subscriptions', 
'gh_organizations_url': 'https://api.github.com/users/sashashura/orgs', 'gh_repos_url': 'https://api.github.com/users/sashashura/repos', 'gh_events_url': 'https://api.github.com/users/sashashura/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/sashashura/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-10T01:28:39Z', 'cntrb_full_name': 'Alex'} +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO cntrb_id 010590d1-3200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO Creating alias for email: aleksandrosansan@gmail.com +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO Updating now resolved email aleksandrosansan@gmail.com +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: ba545dfa4ae72c7fbeeda88e8a8687b3d798bf20 +2023-01-12 12:27:14 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/ba545dfa4ae72c7fbeeda88e8a8687b3d798bf20 +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: nicholas.griffin@bbc.co.uk +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0100b8e0-8200-0000-0000-000000000000'), 'cntrb_login': 'nicholasgriffintn', 'cntrb_created_at': '2015-04-25T20:45:52Z', 'cntrb_email': 'nick@undefined.computer', 'cntrb_company': '@bbc ', 'cntrb_location': 'London', 'cntrb_canonical': 'nick@undefined.computer', 'gh_user_id': 12116098, 'gh_login': 'nicholasgriffintn', 'gh_url': 'https://api.github.com/users/nicholasgriffintn', 'gh_html_url': 'https://github.com/nicholasgriffintn', 'gh_node_id': 'MDQ6VXNlcjEyMTE2MDk4', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/12116098?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/nicholasgriffintn/followers', 'gh_following_url': 'https://api.github.com/users/nicholasgriffintn/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/nicholasgriffintn/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/nicholasgriffintn/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/nicholasgriffintn/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/nicholasgriffintn/orgs', 'gh_repos_url': 'https://api.github.com/users/nicholasgriffintn/repos', 'gh_events_url': 'https://api.github.com/users/nicholasgriffintn/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/nicholasgriffintn/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-20T22:54:06Z', 'cntrb_full_name': 'Nicholas Griffin'} +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO cntrb_id 0100b8e0-8200-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO Creating alias for email: nicholas.griffin@bbc.co.uk +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO Updating now resolved email nicholas.griffin@bbc.co.uk +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: 
bb18200080dc90b068bf786f8ffa81d6df25f03d +2023-01-12 12:27:15 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/bb18200080dc90b068bf786f8ffa81d6df25f03d +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: abuhman@unomaha.edu +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01001511-1400-0000-0000-000000000000'), 'cntrb_login': 'buhmana', 'cntrb_created_at': '2012-01-25T23:12:29Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'abuhman@unomaha.edu', 'gh_user_id': 1380628, 'gh_login': 'buhmana', 'gh_url': 'https://api.github.com/users/buhmana', 'gh_html_url': 'https://github.com/buhmana', 'gh_node_id': 'MDQ6VXNlcjEzODA2Mjg=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/1380628?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/buhmana/followers', 'gh_following_url': 'https://api.github.com/users/buhmana/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/buhmana/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/buhmana/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/buhmana/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/buhmana/orgs', 'gh_repos_url': 'https://api.github.com/users/buhmana/repos', 'gh_events_url': 'https://api.github.com/users/buhmana/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/buhmana/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2020-03-27T16:09:23Z', 'cntrb_full_name': 'Anna'} +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO cntrb_id 01001511-1400-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO Creating alias for email: abuhman@unomaha.edu +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO Updating now resolved email abuhman@unomaha.edu +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: becc8642d8637b348511a6b8516e3666b5655897 +2023-01-12 12:27:16 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/becc8642d8637b348511a6b8516e3666b5655897 +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: shohanduttaroy99@gmail.com +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01026faa-6c00-0000-0000-000000000000'), 'cntrb_login': 'KIRA009', 'cntrb_created_at': '2018-07-05T21:38:47Z', 'cntrb_email': 'shohanduttaroy99@gmail.com', 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'shohanduttaroy99@gmail.com', 'gh_user_id': 40872556, 'gh_login': 'KIRA009', 'gh_url': 'https://api.github.com/users/KIRA009', 'gh_html_url': 'https://github.com/KIRA009', 'gh_node_id': 'MDQ6VXNlcjQwODcyNTU2', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/40872556?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/KIRA009/followers', 'gh_following_url': 'https://api.github.com/users/KIRA009/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/KIRA009/gists{/gist_id}', 'gh_starred_url': 
'https://api.github.com/users/KIRA009/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/KIRA009/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/KIRA009/orgs', 'gh_repos_url': 'https://api.github.com/users/KIRA009/repos', 'gh_events_url': 'https://api.github.com/users/KIRA009/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/KIRA009/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-06T12:47:01Z', 'cntrb_full_name': 'Shohan'} +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO cntrb_id 01026faa-6c00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Creating alias for email: shohanduttaroy99@gmail.com +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Updating now resolved email shohanduttaroy99@gmail.com +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: c14ef9d9ee0e881fcf1ee8211e79578b56bb8634 +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/c14ef9d9ee0e881fcf1ee8211e79578b56bb8634 +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Failed to get login from commit hash +2023-01-12 12:27:17 blueberry insert_facade_contributors[59440] INFO Here is the commit: {'name': 'root', 'hash': 'c14ef9d9ee0e881fcf1ee8211e79578b56bb8634', 'email_raw': 'root@augur', 'resolution_status': 'not_unresolved'} +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO email api url https://api.github.com/search/users?q=root@augur+in:email+type:user +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from root@augur +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO Inserting data to unresolved: {'email': 'root@augur', 'name': 'root'} +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO Could not resolve the username from the email. Trying a name only search... +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO Couldn't resolve name url with given data. Reason: +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] ERROR Failed to get login from supplemental data! 
+2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: c5622b8fab4bfbb1504f921b47a54791a7351879 +2023-01-12 12:27:18 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/c5622b8fab4bfbb1504f921b47a54791a7351879 +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: gloriaeskor@gmail.com +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0102d526-1700-0000-0000-000000000000'), 'cntrb_login': 'glowreeya-01', 'cntrb_created_at': '2019-02-11T10:45:17Z', 'cntrb_email': 'gloriaeskor@gmail.com', 'cntrb_company': None, 'cntrb_location': 'Uyo, Akwa ibom state ', 'cntrb_canonical': 'gloriaeskor@gmail.com', 'gh_user_id': 47523351, 'gh_login': 'glowreeya-01', 'gh_url': 'https://api.github.com/users/glowreeya-01', 'gh_html_url': 'https://github.com/glowreeya-01', 'gh_node_id': 'MDQ6VXNlcjQ3NTIzMzUx', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/47523351?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/glowreeya-01/followers', 'gh_following_url': 'https://api.github.com/users/glowreeya-01/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/glowreeya-01/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/glowreeya-01/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/glowreeya-01/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/glowreeya-01/orgs', 'gh_repos_url': 'https://api.github.com/users/glowreeya-01/repos', 'gh_events_url': 'https://api.github.com/users/glowreeya-01/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/glowreeya-01/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-11T04:07:15Z', 'cntrb_full_name': 'Glowreeyah '} +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 0102d526-1700-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: gloriaeskor@gmail.com +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email gloriaeskor@gmail.com +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: mattgermonprez@Matts-MacBook-Pro-2.local +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01000a03-5000-0000-0000-000000000000'), 'cntrb_login': 'germonprez', 'cntrb_created_at': '2011-03-07T19:14:09Z', 'cntrb_email': 'germonprez@gmail.com', 'cntrb_company': 'University of Nebraska at Omaha', 'cntrb_location': 'Omaha, NE', 'cntrb_canonical': 'germonprez@gmail.com', 'gh_user_id': 656208, 'gh_login': 'germonprez', 'gh_url': 'https://api.github.com/users/germonprez', 'gh_html_url': 'https://github.com/germonprez', 'gh_node_id': 'MDQ6VXNlcjY1NjIwOA==', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/656208?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/germonprez/followers', 'gh_following_url': 'https://api.github.com/users/germonprez/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/germonprez/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/germonprez/starred{/owner}{/repo}', 
'gh_subscriptions_url': 'https://api.github.com/users/germonprez/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/germonprez/orgs', 'gh_repos_url': 'https://api.github.com/users/germonprez/repos', 'gh_events_url': 'https://api.github.com/users/germonprez/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/germonprez/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-09T22:38:58Z', 'cntrb_full_name': 'Matt Germonprez'} +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO cntrb_id 01000a03-5000-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Creating alias for email: mattgermonprez@Matts-MacBook-Pro-2.local +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Updating now resolved email mattgermonprez@Matts-MacBook-Pro-2.local +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: c946eca8421f633db05fd421f0cc35a0a91dfeff +2023-01-12 12:27:19 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/c946eca8421f633db05fd421f0cc35a0a91dfeff +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: dafoster@pivotal.io +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010013e6-eb00-0000-0000-000000000000'), 'cntrb_login': 'geekygirldawn', 'cntrb_created_at': '2012-01-04T16:20:26Z', 'cntrb_email': 'fosterd@vmware.com', 'cntrb_company': 'VMware', 'cntrb_location': 'London, UK', 'cntrb_canonical': 'fosterd@vmware.com', 'gh_user_id': 1304299, 'gh_login': 'geekygirldawn', 'gh_url': 'https://api.github.com/users/geekygirldawn', 'gh_html_url': 'https://github.com/geekygirldawn', 'gh_node_id': 'MDQ6VXNlcjEzMDQyOTk=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/1304299?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/geekygirldawn/followers', 'gh_following_url': 'https://api.github.com/users/geekygirldawn/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/geekygirldawn/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/geekygirldawn/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/geekygirldawn/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/geekygirldawn/orgs', 'gh_repos_url': 'https://api.github.com/users/geekygirldawn/repos', 'gh_events_url': 'https://api.github.com/users/geekygirldawn/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/geekygirldawn/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-11T15:57:50Z', 'cntrb_full_name': 'Dawn Foster'} +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO cntrb_id 010013e6-eb00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO Creating alias for email: dafoster@pivotal.io +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO Updating now resolved email dafoster@pivotal.io +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit 
hash: cb0bce873c5ff578a8b3c818f36ce7efe94a5e0e +2023-01-12 12:27:20 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/cb0bce873c5ff578a8b3c818f36ce7efe94a5e0e +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: devika25012002@gmail.com +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('0103875c-0a00-0000-0000-000000000000'), 'cntrb_login': 'devika34', 'cntrb_created_at': '2019-12-24T13:30:10Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': None, 'cntrb_canonical': 'devika25012002@gmail.com', 'gh_user_id': 59202570, 'gh_login': 'devika34', 'gh_url': 'https://api.github.com/users/devika34', 'gh_html_url': 'https://github.com/devika34', 'gh_node_id': 'MDQ6VXNlcjU5MjAyNTcw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/59202570?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/devika34/followers', 'gh_following_url': 'https://api.github.com/users/devika34/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/devika34/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/devika34/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/devika34/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/devika34/orgs', 'gh_repos_url': 'https://api.github.com/users/devika34/repos', 'gh_events_url': 'https://api.github.com/users/devika34/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/devika34/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-28T14:07:33Z', 'cntrb_full_name': 'devika34'} +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 0103875c-0a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: devika25012002@gmail.com +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email devika25012002@gmail.com +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: michaelwoodruff@mwc-022160.dhcp.missouri.edu +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01008337-7a00-0000-0000-000000000000'), 'cntrb_login': 'woodruff', 'cntrb_created_at': '2014-08-30T15:05:47Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'United States', 'cntrb_canonical': 'michaelwoodruff@mwc-022160.dhcp.missouri.edu', 'gh_user_id': 8599418, 'gh_login': 'woodruff', 'gh_url': 'https://api.github.com/users/woodruff', 'gh_html_url': 'https://github.com/woodruff', 'gh_node_id': 'MDQ6VXNlcjg1OTk0MTg=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/8599418?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/woodruff/followers', 'gh_following_url': 'https://api.github.com/users/woodruff/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/woodruff/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/woodruff/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/woodruff/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/woodruff/orgs', 'gh_repos_url': 'https://api.github.com/users/woodruff/repos', 
'gh_events_url': 'https://api.github.com/users/woodruff/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/woodruff/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2016-02-27T18:47:24Z', 'cntrb_full_name': 'Michael Woodruff'} +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO cntrb_id 01008337-7a00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO Creating alias for email: michaelwoodruff@mwc-022160.dhcp.missouri.edu +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:21 blueberry insert_facade_contributors[59440] INFO Updating now resolved email michaelwoodruff@mwc-022160.dhcp.missouri.edu +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: cf83cec5a9501a3ed3848fdcaac5a940b7864591 +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/cf83cec5a9501a3ed3848fdcaac5a940b7864591 +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: jack.marak44@gmail.com +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010171de-f100-0000-0000-000000000000'), 'cntrb_login': 'jackm357', 'cntrb_created_at': '2016-11-29T03:58:14Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'USA', 'cntrb_canonical': 'jack.marak44@gmail.com', 'gh_user_id': 24239857, 'gh_login': 'jackm357', 'gh_url': 'https://api.github.com/users/jackm357', 'gh_html_url': 'https://github.com/jackm357', 'gh_node_id': 'MDQ6VXNlcjI0MjM5ODU3', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/24239857?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/jackm357/followers', 'gh_following_url': 'https://api.github.com/users/jackm357/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/jackm357/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/jackm357/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/jackm357/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/jackm357/orgs', 'gh_repos_url': 'https://api.github.com/users/jackm357/repos', 'gh_events_url': 'https://api.github.com/users/jackm357/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/jackm357/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2022-12-23T19:10:59Z', 'cntrb_full_name': 'Jack'} +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO cntrb_id 010171de-f100-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO Creating alias for email: jack.marak44@gmail.com +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:22 blueberry insert_facade_contributors[59440] INFO Updating now resolved email jack.marak44@gmail.com +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: d1d35daf0f6d4c7f2b9f6d1fae930dc8a537a914 +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/d1d35daf0f6d4c7f2b9f6d1fae930dc8a537a914 +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO Successfully 
retrieved data from github for email: mishrapratik356@gmail.com +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01029a91-cc00-0000-0000-000000000000'), 'cntrb_login': 'pratikmishra356', 'cntrb_created_at': '2018-09-28T18:55:36Z', 'cntrb_email': None, 'cntrb_company': None, 'cntrb_location': 'KOLKATA', 'cntrb_canonical': 'mishrapratik356@gmail.com', 'gh_user_id': 43684300, 'gh_login': 'pratikmishra356', 'gh_url': 'https://api.github.com/users/pratikmishra356', 'gh_html_url': 'https://github.com/pratikmishra356', 'gh_node_id': 'MDQ6VXNlcjQzNjg0MzAw', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/43684300?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/pratikmishra356/followers', 'gh_following_url': 'https://api.github.com/users/pratikmishra356/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/pratikmishra356/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/pratikmishra356/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/pratikmishra356/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/pratikmishra356/orgs', 'gh_repos_url': 'https://api.github.com/users/pratikmishra356/repos', 'gh_events_url': 'https://api.github.com/users/pratikmishra356/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/pratikmishra356/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-03T13:00:55Z', 'cntrb_full_name': 'PRATIK MISHRA'} +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO cntrb_id 01029a91-cc00-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO Creating alias for email: mishrapratik356@gmail.com +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:23 blueberry insert_facade_contributors[59440] INFO Updating now resolved email mishrapratik356@gmail.com +2023-01-12 12:27:24 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: dac0e73f5c1aa0342f1c1346124e5f13db3654b7 +2023-01-12 12:27:24 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/dac0e73f5c1aa0342f1c1346124e5f13db3654b7 +2023-01-12 12:27:25 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: amureini@redhat.com +2023-01-12 12:27:25 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('01003fb0-1900-0000-0000-000000000000'), 'cntrb_login': 'mureinik', 'cntrb_created_at': '2013-04-16T18:16:32Z', 'cntrb_email': 'mureinik@gmail.com', 'cntrb_company': 'Synopsys', 'cntrb_location': 'Tel Aviv, Israel', 'cntrb_canonical': 'mureinik@gmail.com', 'gh_user_id': 4173849, 'gh_login': 'mureinik', 'gh_url': 'https://api.github.com/users/mureinik', 'gh_html_url': 'https://github.com/mureinik', 'gh_node_id': 'MDQ6VXNlcjQxNzM4NDk=', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/4173849?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/mureinik/followers', 'gh_following_url': 'https://api.github.com/users/mureinik/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/mureinik/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/mureinik/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/mureinik/subscriptions', 'gh_organizations_url': 
'https://api.github.com/users/mureinik/orgs', 'gh_repos_url': 'https://api.github.com/users/mureinik/repos', 'gh_events_url': 'https://api.github.com/users/mureinik/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/mureinik/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2023-01-10T23:08:44Z', 'cntrb_full_name': 'Allon Murienik'} +2023-01-12 12:27:25 blueberry insert_facade_contributors[59440] INFO cntrb_id 01003fb0-1900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:25 blueberry insert_facade_contributors[59440] INFO Creating alias for email: amureini@redhat.com +2023-01-12 12:27:25 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:25 blueberry insert_facade_contributors[59440] INFO Updating now resolved email amureini@redhat.com +2023-01-12 12:27:26 blueberry insert_facade_contributors[59440] INFO Trying to create endpoint from commit hash: f99d42d42541ef2cf19bc72bfc3e960c5f4289cd +2023-01-12 12:27:26 blueberry insert_facade_contributors[59440] INFO Url: https://api.github.com/repos/chaoss/augur/commits/f99d42d42541ef2cf19bc72bfc3e960c5f4289cd +2023-01-12 12:27:29 blueberry insert_facade_contributors[59440] INFO Successfully retrieved data from github for email: anujlamoria20@gmail.com +2023-01-12 12:27:29 blueberry insert_facade_contributors[59440] INFO {'cntrb_id': UUID('010416a1-c900-0000-0000-000000000000'), 'cntrb_login': 'anujlamoria', 'cntrb_created_at': '2020-07-21T10:30:37Z', 'cntrb_email': None, 'cntrb_company': 'Student', 'cntrb_location': 'Jaipur', 'cntrb_canonical': 'anujlamoria20@gmail.com', 'gh_user_id': 68592073, 'gh_login': 'anujlamoria', 'gh_url': 'https://api.github.com/users/anujlamoria', 'gh_html_url': 'https://github.com/anujlamoria', 'gh_node_id': 'MDQ6VXNlcjY4NTkyMDcz', 'gh_avatar_url': 'https://avatars.githubusercontent.com/u/68592073?v=4', 'gh_gravatar_id': '', 'gh_followers_url': 'https://api.github.com/users/anujlamoria/followers', 'gh_following_url': 'https://api.github.com/users/anujlamoria/following{/other_user}', 'gh_gists_url': 'https://api.github.com/users/anujlamoria/gists{/gist_id}', 'gh_starred_url': 'https://api.github.com/users/anujlamoria/starred{/owner}{/repo}', 'gh_subscriptions_url': 'https://api.github.com/users/anujlamoria/subscriptions', 'gh_organizations_url': 'https://api.github.com/users/anujlamoria/orgs', 'gh_repos_url': 'https://api.github.com/users/anujlamoria/repos', 'gh_events_url': 'https://api.github.com/users/anujlamoria/events{/privacy}', 'gh_received_events_url': 'https://api.github.com/users/anujlamoria/received_events', 'gh_type': 'User', 'gh_site_admin': False, 'cntrb_last_used': '2021-11-16T11:38:37Z', 'cntrb_full_name': 'Anuj Lamoria'} +2023-01-12 12:27:29 blueberry insert_facade_contributors[59440] INFO cntrb_id 010416a1-c900-0000-0000-000000000000 found in database and assigned to enriched data +2023-01-12 12:27:29 blueberry insert_facade_contributors[59440] INFO Creating alias for email: anujlamoria20@gmail.com +2023-01-12 12:27:29 blueberry insert_facade_contributors[59440] INFO [] has type +2023-01-12 12:27:29 blueberry insert_facade_contributors[59440] INFO Updating now resolved email anujlamoria20@gmail.com +2023-01-12 12:27:32 blueberry insert_facade_contributors[59440] INFO Done with inserting and updating facade contributors +[2023-01-12 12:27:32,907: INFO/MainProcess] Task augur.tasks.github.facade_github.tasks.insert_facade_contributors[ffb189d0-7f2f-4764-b483-f49e5a72df3b] succeeded in 
199.25256663100117s: None

diff --git a/setup.py b/setup.py
index 75f424036d..1e55a01e0f 100644
--- a/setup.py
+++ b/setup.py
@@ -77,6 +77,8 @@
         "eventlet==0.33.1",
         "flower==1.2.0",
         "tornado==6.1", # added because it sometimes errors when tornado is not 6.1 even though nothing we install depends on it
+        "pylint==2.15.5",
+        "dnspython==2.2.1"
         'Werkzeug~=2.0.0',
         "pylint==2.15.5"
     ],

From 8c9ddb05ab4f44f792ed453298f401a63820aa01 Mon Sep 17 00:00:00 2001
From: "Sean P. Goggins"
Date: Sat, 21 Jan 2023 08:09:35 -0600
Subject: [PATCH 015/134] Update setup.py fixed missing comma
---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 1e55a01e0f..00a88b325c 100644
--- a/setup.py
+++ b/setup.py
@@ -78,7 +78,7 @@
         "flower==1.2.0",
         "tornado==6.1", # added because it sometimes errors when tornado is not 6.1 even though nothing we install depends on it
         "pylint==2.15.5",
-        "dnspython==2.2.1"
+        "dnspython==2.2.1",
         'Werkzeug~=2.0.0',
         "pylint==2.15.5"
     ],
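The missing comma fixed by the patch above is easy to miss because it is not a syntax error: Python concatenates adjacent string literals at compile time, so the two requirements silently merge into one malformed string. A minimal sketch of that behavior (the list below is illustrative, not the full install_requires from setup.py):

```python
# Adjacent string literals concatenate implicitly, so omitting the comma
# produces one bogus requirement instead of raising an error.
requirements = [
    "dnspython==2.2.1"   # <-- no trailing comma here
    'Werkzeug~=2.0.0',
]
print(requirements)  # ['dnspython==2.2.1Werkzeug~=2.0.0']
```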
group.favorited} + data.append({group.name: group_data}) return jsonify({"status": "success", "data": data}) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index 584088f56e..6930f584c8 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -59,7 +59,6 @@ def unauthorized(): @login_manager.user_loader def load_user(user_id): - user = User.get_user(user_id) if not user: @@ -79,22 +78,19 @@ def load_user_request(request): print(f"Current time of user request: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}") token = get_bearer_token() - print(f"Bearer token: {token}") - - with DatabaseSession(logger) as session: + session = DatabaseSession(logger) - current_time = int(time.time()) - token = session.query(UserSessionToken).filter(UserSessionToken.token == token, UserSessionToken.expiration >= current_time).first() - print(f"Token: {token}") - if token: + current_time = int(time.time()) + token = session.query(UserSessionToken).filter(UserSessionToken.token == token, UserSessionToken.expiration >= current_time).first() + if token: - print("Valid user") + print("Valid user") - user = token.user - user._is_authenticated = True - user._is_active = True + user = token.user + user._is_authenticated = True + user._is_active = True - return user + return user return None From d80df62485c1c4f6197f141a7a15b3b4d76323fe Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sat, 21 Jan 2023 17:09:22 -0600 Subject: [PATCH 017/134] DatabaseEngine class for better handling of db engines (#2133) * New engine changes Signed-off-by: Andrew Brain * Merge Signed-off-by: Andrew Brain * Improve db engine handling Signed-off-by: Andrew Brain * Fix small error in engine class Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Change create_database_engine calls to DatabaseEngine objects Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Fix Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Strings with no size throw errors. 
``` 2023-01-21 12:01:44 linda collect_releases[3586998] INFO Inserting release for repo with id:34729, owner:ryran, release name:059 [2023-01-21 12:01:44,417: ERROR/MainProcess] Task augur.tasks.github.releases.tasks.collect_releases[bd258a04-8b8a-498d-aace-8e4b81fbb149] raised unexpected: TypeError("unsupported operand type(s) for +: 'NoneType' and 'str'") Traceback (most recent call last): File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 451, in trace_task R = retval = fun(*args, **kwargs) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 734, in __protected_call__ return self.run(*args, **kwargs) File "/home/sean/github/rh-augur-new-dev/augur/tasks/github/releases/tasks.py", line 15, in collect_releases releases_model(session, repo.repo_git, repo.repo_id) File "/home/sean/github/rh-augur-new-dev/augur/tasks/github/releases/core.py", line 207, in releases_model insert_release(session, repo_id, data['owner'], release, True) File "/home/sean/github/rh-augur-new-dev/augur/tasks/github/releases/core.py", line 87, in insert_release release_inf = get_release_inf(session, repo_id, release, tag_only) File "/home/sean/github/rh-augur-new-dev/augur/tasks/github/releases/core.py", line 55, in get_release_inf author = name + email TypeError: unsupported operand type(s) for +: 'NoneType' and 'str' [2023-01-21 12:01:44,764: ERROR/MainProcess] Task augur.tasks.start_tasks.repo_collect_phase[82ecd03b-ed17-4150-b841-4bb75ff8bf56] raised unexpected: TypeError("unsupported operand type(s) for +: 'NoneType' and 'str'") Traceback (most recent call last): File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 451, in trace_task R = retval = fun(*args, **kwargs) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 734, in __protected_call__ return self.run(*args, **kwargs) File "/home/sean/github/rh-augur-new-dev/augur/tasks/start_tasks.py", line 107, in repo_collect_phase result.wait() File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/result.py", line 224, in get return self.backend.wait_for_pending( File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/backends/asynchronous.py", line 221, in wait_for_pending for _ in self._wait_for_pending(result, **kwargs): File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/backends/asynchronous.py", line 287, in _wait_for_pending for _ in self.drain_events_until( File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/backends/asynchronous.py", line 58, in drain_events_until on_interval() File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/vine/promises.py", line 160, in __call__ return self.throw() File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/vine/promises.py", line 157, in __call__ retval = fun(*final_args, **final_kwargs) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/result.py", line 237, in _maybe_reraise_parent_error node.maybe_throw() File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/result.py", line 609, in maybe_throw result.maybe_throw(callback=callback, propagate=propagate) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/result.py", line 336, in maybe_throw self.throw(value, self._to_remote_traceback(tb)) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/result.py", line 329, 
in throw self.on_ready.throw(*args, **kwargs) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/vine/promises.py", line 234, in throw reraise(type(exc), exc, tb) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/vine/utils.py", line 30, in reraise raise value TypeError: unsupported operand type(s) for +: 'NoneType' and 'str' ``` * Attempted patch for this error in augur.tasks.git.facade_tasks.trim_commits_post_analysis_facade_task ```Traceback (most recent call last): File "/home/sean/github/virtualenv/ag3/lib/python3.8/site-packages/celery/app/trace.py", line 451, in trace_task R = retval = fun(*args, **kwargs) File "/home/sean/github/virtualenv/ag3/lib/python3.8/site-packages/celery/app/trace.py", line 734, in __protected_call__ return self.run(*args, **kwargs) File "/home/sean/github/ag3/augur/tasks/git/facade_tasks.py", line 197, in trim_commits_post_analysis_facade_task all_missing_commits.extend(commits_with_repo_tuple) NameError: name 'all_missing_commits' is not defined ``` * Prevent NoneTypes with the dicitionairy.get() method Signed-off-by: Andrew Brain * Fix Signed-off-by: Andrew Brain * Fix casting Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Co-authored-by: Sean P. Goggins --- augur/api/metrics/commit.py | 9 +- augur/api/metrics/contributor.py | 13 +- augur/api/metrics/deps.py | 4 +- augur/api/metrics/insight.py | 4 +- augur/api/metrics/issue.py | 64 +++++----- augur/api/metrics/message.py | 10 +- augur/api/metrics/pull_request.py | 112 +++++++++------- augur/api/metrics/release.py | 23 ++-- augur/api/metrics/repo_meta.py | 97 ++++++++------ augur/api/metrics/toss.py | 9 +- augur/api/routes/user.py | 13 +- augur/application/cli/__init__.py | 4 +- augur/application/cli/db.py | 17 +-- augur/application/cli/user.py | 4 +- augur/application/db/engine.py | 74 +++++++---- augur/application/db/session.py | 7 +- augur/application/logs.py | 6 +- augur/application/schema/alembic/env.py | 6 +- .../data_analysis/clustering_worker/tasks.py | 8 +- .../contributor_breadth_worker.py | 8 +- .../data_analysis/discourse_analysis/tasks.py | 5 +- .../data_analysis/insight_worker/tasks.py | 120 +++++++++--------- .../data_analysis/message_insights/tasks.py | 28 ++-- .../pull_request_analysis_worker/tasks.py | 25 ++-- augur/tasks/git/facade_tasks.py | 3 + augur/tasks/github/releases/core.py | 28 ++-- augur/tasks/init/celery_app.py | 4 +- augur/util/repo_load_controller.py | 7 +- tests/test_applicaton/test_db/test_session.py | 28 +++- .../test_commit_routes_api.py | 9 ++ .../test_contributor_routes_api.py | 4 + .../test_issue_routes_api.py | 34 +++++ .../test_pull_request_routes_api.py | 3 + .../test_repo_meta_routes_api.py | 15 +++ .../test_commit_routes_data.py | 9 ++ .../test_contributor_routes_data.py | 4 + .../test_issue_routes_data.py | 34 +++++ .../test_pull_request_routes_data.py | 3 + .../test_repo_meta_routes_data.py | 17 +++ .../test_github_tasks/test_pull_requests.py | 5 +- 40 files changed, 566 insertions(+), 311 deletions(-) diff --git a/augur/api/metrics/commit.py b/augur/api/metrics/commit.py index eaa01e69a0..3bd509073e 100644 --- a/augur/api/metrics/commit.py +++ b/augur/api/metrics/commit.py @@ -8,8 +8,9 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import 
DatabaseEngine + +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric() def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, period='month'): @@ -167,6 +168,7 @@ def annual_commit_count_ranked_by_new_repo_in_repo_group(repo_group_id, repo_id= GROUP BY repo.repo_id, repo_name, YEAR ORDER BY YEAR ASC """.format(table, period)) + results = pd.read_sql(cdRgNewrepRankedCommitsSQL, engine, params={'repo_id': repo_id, 'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date}) return results @@ -310,6 +312,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8): results = pd.read_sql(total_commits_SQL, engine, params={'year': year, 'repo_id': repo_id}) + if not results.iloc[0]['sum']: return pd.DataFrame() @@ -353,7 +356,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8): """) results = pd.read_sql(committers_SQL, engine, - params={'year': year, 'repo_id': repo_id}) + params={'year': year, 'repo_id': repo_id}) cumsum = 0 for i, row in results.iterrows(): diff --git a/augur/api/metrics/contributor.py b/augur/api/metrics/contributor.py index bec23051f0..f5932319db 100644 --- a/augur/api/metrics/contributor.py +++ b/augur/api/metrics/contributor.py @@ -9,8 +9,9 @@ from augur.api.util import register_metric import uuid -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine + +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric() def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): @@ -43,7 +44,7 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end if repo_id: contributorsSQL = s.sql.text(""" - SELECT id::text AS user_id, + SELECT id::text AS user_id, SUM(commits) AS commits, SUM(issues) AS issues, SUM(commit_comments) AS commit_comments, @@ -130,7 +131,7 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end 'begin_date': begin_date, 'end_date': end_date}) else: contributorsSQL = s.sql.text(""" - SELECT id::text AS user_id, + SELECT id::text AS user_id, SUM(commits) AS commits, SUM(issues) AS issues, SUM(commit_comments) AS commit_comments, @@ -213,7 +214,7 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end """) results = pd.read_sql(contributorsSQL, engine, params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -283,7 +284,7 @@ def contributors_new(repo_group_id, repo_id=None, period='day', begin_date=None, """) results = pd.read_sql(contributorsNewSQL, engine, params={'repo_id': repo_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) else: contributorsNewSQL = s.sql.text(""" SELECT date_trunc(:period, b.created_at::DATE) AS date, COUNT(id) AS new_contributors, repo.repo_id, repo_name diff --git a/augur/api/metrics/deps.py b/augur/api/metrics/deps.py index 9f1a232896..ff264ed41c 100644 --- a/augur/api/metrics/deps.py +++ b/augur/api/metrics/deps.py @@ -7,8 +7,8 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = 
DatabaseEngine(connection_pool_size=1).engine @register_metric() def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/insight.py b/augur/api/metrics/insight.py index 4f443e55d9..d81b1cfa3f 100644 --- a/augur/api/metrics/insight.py +++ b/augur/api/metrics/insight.py @@ -7,8 +7,8 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric(type="repo_group_only") def top_insights(repo_group_id, num_repos=6): diff --git a/augur/api/metrics/issue.py b/augur/api/metrics/issue.py index 1ad851dd39..b20342d667 100644 --- a/augur/api/metrics/issue.py +++ b/augur/api/metrics/issue.py @@ -8,8 +8,8 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric() def issues_first_time_opened(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): @@ -78,8 +78,8 @@ def issues_first_time_opened(repo_group_id, repo_id=None, period='day', begin_da ORDER BY issue_date """) results = pd.read_sql(issueNewContributor, engine, - params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + params={'repo_group_id': repo_group_id, 'period': period, + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -124,7 +124,7 @@ def issues_first_time_closed(repo_group_id, repo_id=None, period='day', begin_da 'begin_date': begin_date, 'end_date': end_date}) else: issuesClosedSQL = s.sql.text(""" - SELECT date_trunc(:period, new_date::DATE) AS issue_date, + SELECT date_trunc(:period, new_date::DATE) AS issue_date, COUNT(cntrb_id), repo_name, repo_id FROM ( @@ -181,7 +181,7 @@ def issues_new(repo_group_id, repo_id=None, period='day', begin_date=None, end_d """) results = pd.read_sql(issues_new_SQL, engine, params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results @@ -200,7 +200,7 @@ def issues_new(repo_group_id, repo_id=None, period='day', begin_date=None, end_d """) results = pd.read_sql(issues_new_SQL, engine, params={'repo_id': repo_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -237,9 +237,8 @@ def issues_active(repo_group_id, repo_id=None, period='day', begin_date=None, en """) results = pd.read_sql(issues_active_SQL, engine, params={'repo_group_id': repo_group_id, 'period':period, - 'begin_date': begin_date, 'end_date':end_date}) - return results - + 'begin_date': begin_date, 'end_date':end_date}) + else: issues_active_SQL = s.sql.text(""" SELECT @@ -257,8 +256,8 @@ def issues_active(repo_group_id, repo_id=None, period='day', begin_date=None, en """) results = pd.read_sql(issues_active_SQL, engine, params={'repo_id': repo_id, 'period':period, - 'begin_date': begin_date, 'end_date':end_date}) - return results + 'begin_date': begin_date, 'end_date':end_date}) + return results @register_metric() def issues_closed(repo_group_id, repo_id=None, period='day', begin_date=None, 
end_date=None): @@ -293,9 +292,7 @@ def issues_closed(repo_group_id, repo_id=None, period='day', begin_date=None, en """) results = pd.read_sql(issues_closed_SQL, engine, params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) - - return results + 'begin_date': begin_date, 'end_date': end_date}) else: issues_closed_SQL = s.sql.text(""" @@ -314,7 +311,8 @@ def issues_closed(repo_group_id, repo_id=None, period='day', begin_date=None, en results = pd.read_sql(issues_closed_SQL, engine, params={'repo_id': repo_id, 'period': period, 'begin_date': begin_date, 'end_date': end_date}) - return results + + return results @register_metric() def issue_duration(repo_group_id, repo_id=None, begin_date=None, end_date=None): @@ -351,8 +349,8 @@ def issue_duration(repo_group_id, repo_id=None, begin_date=None, end_date=None): """) results = pd.read_sql(issue_duration_SQL, engine, params={'repo_group_id': repo_group_id, - 'begin_date': begin_date, - 'end_date': end_date}) + 'begin_date': begin_date, + 'end_date': end_date}) results['duration'] = results['duration'].astype(str) return results @@ -375,8 +373,8 @@ def issue_duration(repo_group_id, repo_id=None, begin_date=None, end_date=None): """) results = pd.read_sql(issue_duration_SQL, engine, params={'repo_id': repo_id, - 'begin_date': begin_date, - 'end_date': end_date}) + 'begin_date': begin_date, + 'end_date': end_date}) results['duration'] = results['duration'].astype(str) return results @@ -421,8 +419,8 @@ def issue_participants(repo_group_id, repo_id=None, begin_date=None, end_date=No """) result = pd.read_sql(issue_participants_SQL, engine, params={'repo_group_id': repo_group_id, - 'begin_date': begin_date, - 'end_date': end_date}) + 'begin_date': begin_date, + 'end_date': end_date}) return result else: issue_participants_SQL = s.sql.text(""" @@ -449,8 +447,8 @@ def issue_participants(repo_group_id, repo_id=None, begin_date=None, end_date=No """) result = pd.read_sql(issue_participants_SQL, engine, params={'repo_id': repo_id, - 'begin_date': begin_date, - 'end_date': end_date}) + 'begin_date': begin_date, + 'end_date': end_date}) return result @register_metric() @@ -669,8 +667,9 @@ def average_issue_resolution_time(repo_group_id, repo_id=None): ORDER BY issues.repo_id """) + results = pd.read_sql(avg_issue_resolution_SQL, engine, - params={'repo_group_id': repo_group_id}) + params={'repo_group_id': repo_group_id}) return results else: @@ -686,7 +685,7 @@ def average_issue_resolution_time(repo_group_id, repo_id=None): """) results = pd.read_sql(avg_issue_resolution_SQL, engine, - params={'repo_id': repo_id}) + params={'repo_id': repo_id}) return results @register_metric() @@ -896,7 +895,7 @@ def issue_comments_mean(repo_group_id, repo_id=None, group_by='week'): raise ValueError("Incorrect value for 'group_by'") results = pd.read_sql(issue_comments_mean_std_SQL, engine, - params={'repo_group_id': repo_group_id}) + params={'repo_group_id': repo_group_id}) return results else: @@ -949,7 +948,7 @@ def issue_comments_mean(repo_group_id, repo_id=None, group_by='week'): raise ValueError("Incorrect value for 'group_by'") results = pd.read_sql(issue_comments_mean_std_SQL, engine, - params={'repo_id': repo_id}) + params={'repo_id': repo_id}) return results @register_metric() @@ -979,9 +978,10 @@ def issue_comments_mean_std(repo_group_id, repo_id=None, group_by='week'): ORDER BY repo_id, date """) + results = pd.read_sql(issue_comments_mean_std_SQL, engine, - params={'repo_group_id': repo_group_id, - 
'group_by': group_by}) + params={'repo_group_id': repo_group_id, + 'group_by': group_by}) return results else: @@ -1008,7 +1008,7 @@ def issue_comments_mean_std(repo_group_id, repo_id=None, group_by='week'): """) results = pd.read_sql(issue_comments_mean_std_SQL, engine, - params={'repo_id': repo_id, 'group_by': group_by}) + params={'repo_id': repo_id, 'group_by': group_by}) return results @register_metric() @@ -1059,5 +1059,5 @@ def abandoned_issues(repo_group_id, repo_id=None, period='day', begin_date=None, ) results = pd.read_sql(abandonedSQL, engine, params={'repo_id': repo_id, 'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results diff --git a/augur/api/metrics/message.py b/augur/api/metrics/message.py index 70db2aaab3..7b97397804 100644 --- a/augur/api/metrics/message.py +++ b/augur/api/metrics/message.py @@ -9,9 +9,8 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() - +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric() def repo_messages(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): @@ -33,7 +32,6 @@ def repo_messages(repo_group_id, repo_id=None, period='day', begin_date=None, en repomessagesSQL = None - if repo_id: repomessagesSQL = s.sql.text(""" @@ -90,8 +88,8 @@ def repo_messages(repo_group_id, repo_id=None, period='day', begin_date=None, en """) results = pd.read_sql(repomessagesSQL, engine, - params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + params={'repo_group_id': repo_group_id, 'period': period, + 'begin_date': begin_date, 'end_date': end_date}) return results diff --git a/augur/api/metrics/pull_request.py b/augur/api/metrics/pull_request.py index e4bd35cc73..fd2754afa8 100644 --- a/augur/api/metrics/pull_request.py +++ b/augur/api/metrics/pull_request.py @@ -8,8 +8,9 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine + @register_metric() def pull_requests_merge_contributor_new(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): @@ -39,9 +40,11 @@ def pull_requests_merge_contributor_new(repo_group_id, repo_id=None, period='day GROUP BY cmt_author_email, repo_name ) as abc GROUP BY commit_date, repo_name """) + + results = pd.read_sql(commitNewContributor, engine, params={'repo_id': repo_id, 'period': period, - 'begin_date': begin_date, - 'end_date': end_date}) + 'begin_date': begin_date, + 'end_date': end_date}) else: commitNewContributor = s.sql.text(""" SELECT abc.repo_id, repo_name ,date_trunc(:period, new_date::DATE) as commit_date, @@ -56,10 +59,12 @@ def pull_requests_merge_contributor_new(repo_group_id, repo_id=None, period='day WHERE abc.repo_id = repo.repo_id GROUP BY abc.repo_id, repo_name, commit_date """) + + results = pd.read_sql(commitNewContributor, engine, - params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, - 'end_date': end_date}) + params={'repo_group_id': repo_group_id, 'period': period, + 'begin_date': begin_date, + 'end_date': end_date}) return results @register_metric() @@ -90,9 
+95,12 @@ def pull_requests_closed_no_merge(repo_group_id, repo_id=None, period='day', beg GROUP BY closed_date, pull_request_id ORDER BY closed_date """) + + + results = pd.read_sql(closedNoMerge, engine, params={'repo_id': repo_id, 'period': period, - 'begin_date': begin_date, - 'end_date': end_date}) + 'begin_date': begin_date, + 'end_date': end_date}) else: closedNoMerge = s.sql.text(""" @@ -104,10 +112,11 @@ def pull_requests_closed_no_merge(repo_group_id, repo_id=None, period='day', beg ORDER BY closed_date """) + results = pd.read_sql(closedNoMerge, engine, - params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, - 'end_date': end_date}) + params={'repo_group_id': repo_group_id, 'period': period, + 'begin_date': begin_date, + 'end_date': end_date}) return results @register_metric() @@ -143,9 +152,10 @@ def reviews(repo_group_id, repo_id=None, period='day', begin_date=None, end_date ORDER BY pull_requests.repo_id, date """) + results = pd.read_sql(reviews_SQL, engine, - params={'period': period, 'repo_group_id': repo_group_id, - 'begin_date': begin_date, 'end_date': end_date }) + params={'period': period, 'repo_group_id': repo_group_id, + 'begin_date': begin_date, 'end_date': end_date }) return results else: @@ -163,9 +173,10 @@ def reviews(repo_group_id, repo_id=None, period='day', begin_date=None, end_date ORDER BY date """) + results = pd.read_sql(reviews_SQL, engine, - params={'period': period, 'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) + params={'period': period, 'repo_id': repo_id, + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -202,9 +213,10 @@ def reviews_accepted(repo_group_id, repo_id=None, period='day', begin_date=None, ORDER BY pull_requests.repo_id, date """) + results = pd.read_sql(reviews_accepted_SQL, engine, - params={'period': period, 'repo_group_id': repo_group_id, - 'begin_date': begin_date, 'end_date': end_date}) + params={'period': period, 'repo_group_id': repo_group_id, + 'begin_date': begin_date, 'end_date': end_date}) return results else: reviews_accepted_SQL = s.sql.text(""" @@ -223,8 +235,8 @@ def reviews_accepted(repo_group_id, repo_id=None, period='day', begin_date=None, """) results = pd.read_sql(reviews_accepted_SQL, engine, - params={'period': period, 'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) + params={'period': period, 'repo_id': repo_id, + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -261,9 +273,10 @@ def reviews_declined(repo_group_id, repo_id=None, period='day', begin_date=None, ORDER BY pull_requests.repo_id, date """) + results = pd.read_sql(reviews_declined_SQL, engine, - params={'period': period, 'repo_group_id': repo_group_id, - 'begin_date': begin_date, 'end_date': end_date }) + params={'period': period, 'repo_group_id': repo_group_id, + 'begin_date': begin_date, 'end_date': end_date }) return results else: reviews_declined_SQL = s.sql.text(""" @@ -282,8 +295,8 @@ def reviews_declined(repo_group_id, repo_id=None, period='day', begin_date=None, """) results = pd.read_sql(reviews_declined_SQL, engine, - params={'period': period, 'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) + params={'period': period, 'repo_id': repo_id, + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -320,10 +333,11 @@ def review_duration(repo_group_id, repo_id=None, begin_date=None, end_date=None) ORDER BY pull_requests.repo_id, 
pull_requests.pull_request_id """) + results = pd.read_sql(review_duration_SQL, engine, - params={'repo_group_id': repo_group_id, - 'begin_date': begin_date, - 'end_date': end_date}) + params={'repo_group_id': repo_group_id, + 'begin_date': begin_date, + 'end_date': end_date}) results['duration'] = results['duration'].astype(str) return results else: @@ -344,9 +358,9 @@ def review_duration(repo_group_id, repo_id=None, begin_date=None, end_date=None) """) results = pd.read_sql(review_duration_SQL, engine, - params={'repo_id': repo_id, - 'begin_date': begin_date, - 'end_date': end_date}) + params={'repo_id': repo_id, + 'begin_date': begin_date, + 'end_date': end_date}) results['duration'] = results['duration'].astype(str) return results @@ -395,6 +409,7 @@ def pull_request_acceptance_rate(repo_group_id, repo_id=None, begin_date=None, e ) opened ON opened.date_created = accepted.accepted_on """) + results = pd.read_sql(prAccRateSQL, engine, params={'repo_group_id': repo_group_id, 'group_by': group_by, 'begin_date': begin_date, 'end_date': end_date}) return results @@ -427,8 +442,9 @@ def pull_request_acceptance_rate(repo_group_id, repo_id=None, begin_date=None, e ) opened ON opened.date_created = accepted.accepted_on """) + results = pd.read_sql(prAccRateSQL, engine, params={'repo_id': repo_id, 'group_by': group_by, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -530,9 +546,11 @@ def pull_request_average_time_to_close(repo_group_id, repo_id=None, group_by='mo ORDER BY merged_status """) - pr_all = pd.read_sql(pr_all_SQL, engine, - params={'repo_id': repo_id, 'repo_group_id':repo_group_id, - 'begin_date': begin_date, 'end_date': end_date}) + + + pr_all = pd.read_sql(pr_all_SQL, engine, + params={'repo_id': repo_id, 'repo_group_id':repo_group_id, + 'begin_date': begin_date, 'end_date': end_date}) if not repo_id: pr_avg_time_to_close = pr_all.groupby(['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys).mean().reset_index()[['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys + ['average_{}_to_close'.format(time_unit)]] else: @@ -641,9 +659,10 @@ def pull_request_merged_status_counts(repo_group_id, repo_id=None, begin_date='1 GROUP BY closed_year, closed_month, merged_status, time_between_responses.pr_closed_at, time_between_responses.average_time_between_responses """) - pr_all = pd.read_sql(pr_all_SQL, engine, - params={'repo_id': repo_id, 'repo_group_id':repo_group_id, - 'begin_date': begin_date, 'end_date': end_date}) + + pr_all = pd.read_sql(pr_all_SQL, engine, + params={'repo_id': repo_id, 'repo_group_id':repo_group_id, + 'begin_date': begin_date, 'end_date': end_date}) if not repo_id: pr_avg_time_between_responses = pr_all.groupby(['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys).mean().reset_index()[['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys + ['average_{}_between_responses'.format(time_unit)]] else: @@ -750,9 +769,10 @@ def pull_request_average_commit_counts(repo_group_id, repo_id=None, group_by='mo GROUP BY closed_year, merged_status, data.pr_closed_at, data.commit_count """) - pr_all = pd.read_sql(pr_all_SQL, engine, - params={'repo_id': repo_id, 'repo_group_id':repo_group_id, - 'begin_date': begin_date, 'end_date': end_date}) + + pr_all = pd.read_sql(pr_all_SQL, engine, + params={'repo_id': repo_id, 
'repo_group_id':repo_group_id, + 'begin_date': begin_date, 'end_date': end_date}) if not repo_id: pr_avg_commit_counts = pr_all.groupby(['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys).mean().reset_index()[['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys + ['average_commits_per_pull_request']] else: @@ -908,9 +928,10 @@ def pull_request_average_event_counts(repo_group_id, repo_id=None, group_by='mon ORDER BY merged_status, closed_year, closed_week, closed_day """) - pr_all = pd.read_sql(pr_all_SQL, engine, - params={'repo_id': repo_id, 'repo_group_id':repo_group_id, - 'begin_date': begin_date, 'end_date': end_date}) + + pr_all = pd.read_sql(pr_all_SQL, engine, + params={'repo_id': repo_id, 'repo_group_id':repo_group_id, + 'begin_date': begin_date, 'end_date': end_date}) count_names = ['assigned_count', 'review_requested_count', 'labeled_count', 'unlabeled_count', 'subscribed_count', 'mentioned_count', 'referenced_count', 'closed_count', 'head_ref_force_pushed_count', 'head_ref_deleted_count', 'milestoned_count', 'merged_count', 'comment_count'] average_count_names = [] @@ -1113,8 +1134,9 @@ def pull_request_merged_status_counts(repo_group_id, repo_id=None, begin_date='1 AND pr_closed_at::date <= :end_date ::date """) - pr_all = pd.read_sql(pr_all_sql, engine, params={'repo_group_id': repo_group_id, - 'repo_id': repo_id, 'begin_date': begin_date, 'end_date': end_date}) + + pr_all = pd.read_sql(pr_all_sql, engine, params={'repo_group_id': repo_group_id, + 'repo_id': repo_id, 'begin_date': begin_date, 'end_date': end_date}) if not repo_id: pr_merged_counts = pr_all.groupby(['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys).count().reset_index()[['merged_status', 'repo_id', 'repo_name', 'repo_group_id', 'repo_group_name'] + time_group_bys + ['pull_request_count']] diff --git a/augur/api/metrics/release.py b/augur/api/metrics/release.py index db6cce76dc..08dfb2f0a7 100644 --- a/augur/api/metrics/release.py +++ b/augur/api/metrics/release.py @@ -8,8 +8,8 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric() def releases(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): @@ -51,9 +51,10 @@ def releases(repo_group_id, repo_id=None, period='day', begin_date=None, end_dat ORDER BY releases.release_published_at DESC """) + results = pd.read_sql(releases_SQL, engine, - params={'period': period, 'repo_group_id': repo_group_id, - 'begin_date': begin_date, 'end_date': end_date }) + params={'period': period, 'repo_group_id': repo_group_id, + 'begin_date': begin_date, 'end_date': end_date }) return results else: @@ -80,9 +81,10 @@ def releases(repo_group_id, repo_id=None, period='day', begin_date=None, end_dat ORDER BY releases.release_published_at DESC """) + results = pd.read_sql(releases_SQL, engine, - params={'period': period, 'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) + params={'period': period, 'repo_id': repo_id, + 'begin_date': begin_date, 'end_date': end_date}) return results @register_metric() @@ -126,9 +128,10 @@ def tag_only_releases(repo_group_id, repo_id=None, period='day', begin_date=None ORDER BY releases.release_published_at DESC """) + results = 
pd.read_sql(releases_SQL, engine, - params={'period': period, 'repo_group_id': repo_group_id, - 'begin_date': begin_date, 'end_date': end_date }) + params={'period': period, 'repo_group_id': repo_group_id, + 'begin_date': begin_date, 'end_date': end_date }) return results else: @@ -149,8 +152,8 @@ def tag_only_releases(repo_group_id, repo_id=None, period='day', begin_date=None """) results = pd.read_sql(releases_SQL, engine, - params={'period': period, 'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) + params={'period': period, 'repo_id': repo_id, + 'begin_date': begin_date, 'end_date': end_date}) return results def create_release_metrics(metrics): diff --git a/augur/api/metrics/repo_meta.py b/augur/api/metrics/repo_meta.py index 53a8d38baa..b20d3421f1 100644 --- a/augur/api/metrics/repo_meta.py +++ b/augur/api/metrics/repo_meta.py @@ -11,8 +11,8 @@ from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine logger = logging.getLogger("augur") @@ -48,8 +48,9 @@ def code_changes(repo_group_id, repo_id=None, period='week', begin_date=None, en ORDER BY week """) + results = pd.read_sql(code_changes_SQL, engine, params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) results['week'] = results['week'].apply(lambda x: x - 1) results['date'] = results['year'].astype(str) + ' ' + results['week'].astype(str) + ' 0' results['date'] = results['date'].apply(lambda x: datetime.datetime.strptime(x, "%Y %W %w")) @@ -69,8 +70,9 @@ def code_changes(repo_group_id, repo_id=None, period='week', begin_date=None, en ORDER BY week """) + results = pd.read_sql(code_changes_SQL, engine, params={'repo_id': repo_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) results['week'] = results['week'].apply(lambda x: x - 1) results['date'] = results['year'].astype(str) + ' ' + results['week'].astype(str) + ' 0' @@ -112,7 +114,7 @@ def code_changes_lines(repo_group_id, repo_id=None, period='day', begin_date=Non """) results = pd.read_sql(code_changes_lines_SQL, engine, params={'repo_group_id': repo_group_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results @@ -130,8 +132,9 @@ def code_changes_lines(repo_group_id, repo_id=None, period='day', begin_date=Non ORDER BY date; """) + results = pd.read_sql(code_changes_lines_SQL, engine, params={'repo_id': repo_id, 'period': period, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) return results @@ -150,29 +153,30 @@ def sub_projects(repo_group_id, repo_id=None, begin_date=None, end_date=None): if not end_date: end_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') - if repo_id: - sub_projectsSQL = s.sql.text(""" - SELECT COUNT(*) AS sub_project_count - FROM repo - WHERE repo_group_id = ( - SELECT repo_group_id - FROM repo - WHERE repo_id = :repo_id) - AND repo_added BETWEEN :begin_date AND :end_date - """) + + if repo_id: + sub_projectsSQL = s.sql.text(""" + SELECT COUNT(*) AS sub_project_count + FROM repo + WHERE repo_group_id = ( + SELECT repo_group_id + FROM repo + WHERE repo_id = :repo_id) + AND repo_added BETWEEN :begin_date AND :end_date + 
""") - results = pd.read_sql(sub_projectsSQL, engine, params={'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) - else: - sub_projectsSQL = s.sql.text(""" - SELECT COUNT(*) AS sub_project_count - FROM repo - WHERE repo_group_id = :repo_group_id - AND repo_added BETWEEN :begin_date AND :end_date - """) + results = pd.read_sql(sub_projectsSQL, engine, params={'repo_id': repo_id, + 'begin_date': begin_date, 'end_date': end_date}) + else: + sub_projectsSQL = s.sql.text(""" + SELECT COUNT(*) AS sub_project_count + FROM repo + WHERE repo_group_id = :repo_group_id + AND repo_added BETWEEN :begin_date AND :end_date + """) - results = pd.read_sql(sub_projectsSQL, engine, params={'repo_group_id': repo_group_id, - 'begin_date': begin_date, 'end_date': end_date}) + results = pd.read_sql(sub_projectsSQL, engine, params={'repo_group_id': repo_group_id, + 'begin_date': begin_date, 'end_date': end_date}) return results @@ -192,6 +196,7 @@ def sbom_download(repo_group_id, repo_id=None): logger.debug(dosocs_SQL) params = {'repo_id': repo_id} + return pd.read_sql(dosocs_SQL, engine, params=params) #return [json.dumps(license_information)] @@ -260,6 +265,7 @@ def forks(repo_group_id, repo_id=None): ORDER BY repo_info.repo_id, date """) + results = pd.read_sql(forks_SQL, engine, params={'repo_group_id': repo_group_id}) return results @@ -274,6 +280,7 @@ def forks(repo_group_id, repo_id=None): ORDER BY date """) + results = pd.read_sql(forks_SQL, engine, params={'repo_id': repo_id}) return results @@ -298,6 +305,7 @@ def fork_count(repo_group_id, repo_id=None): WHERE repo_group_id = :repo_group_id) """) + results = pd.read_sql(fork_count_SQL, engine, params={'repo_group_id': repo_group_id}) return results else: @@ -309,6 +317,7 @@ def fork_count(repo_group_id, repo_id=None): LIMIT 1 """) + results = pd.read_sql(fork_count_SQL, engine, params={'repo_id': repo_id}) return results @@ -337,6 +346,7 @@ def languages(repo_group_id, repo_id=None): WHERE repo_id = :repo_id """) + results = pd.read_sql(languages_SQL, engine, params={'repo_id': repo_id}) return results @@ -587,9 +597,10 @@ def license_count(repo_group_id, repo_id=None): GROUP BY a.name, a.number_of_license, a.licensed, b.total """) - results = pd.read_sql(license_declared_SQL, engine, params={'repo_id': repo_id, 'repo_group_id':repo_group_id}) + + results = pd.read_sql(license_declared_SQL, engine, params={'repo_id': repo_id, 'repo_group_id':repo_group_id}) - return results + return results @register_metric() @@ -615,6 +626,7 @@ def stars(repo_group_id, repo_id=None): ORDER BY repo_info.repo_id, date """) + results = pd.read_sql(stars_SQL, engine, params={'repo_group_id': repo_group_id}) return results @@ -629,8 +641,8 @@ def stars(repo_group_id, repo_id=None): ORDER BY date """) - results = pd.read_sql(stars_SQL, engine, params={'repo_id': repo_id}) - return results + results = pd.read_sql(stars_SQL, engine, params={'repo_id': repo_id}) + return results @register_metric() def stars_count(repo_group_id, repo_id=None): @@ -653,6 +665,7 @@ def stars_count(repo_group_id, repo_id=None): WHERE repo_group_id = :repo_group_id) """) + results = pd.read_sql(stars_count_SQL, engine, params={'repo_group_id': repo_group_id}) return results else: @@ -690,6 +703,7 @@ def watchers(repo_group_id, repo_id=None): ORDER BY repo_info.repo_id, date """) + results = pd.read_sql(watchers_SQL, engine, params={'repo_group_id': repo_group_id}) return results @@ -704,6 +718,7 @@ def watchers(repo_group_id, repo_id=None): ORDER BY date """) + results = 
pd.read_sql(watchers_SQL, engine, params={'repo_id': repo_id}) return results @@ -728,6 +743,7 @@ def watchers_count(repo_group_id, repo_id=None): WHERE repo_group_id = :repo_group_id) """) + results = pd.read_sql(watchers_count_SQL, engine, params={'repo_group_id': repo_group_id}) return results else: @@ -739,6 +755,7 @@ def watchers_count(repo_group_id, repo_id=None): LIMIT 1 """) + results = pd.read_sql(watchers_count_SQL, engine, params={'repo_id': repo_id}) return results @@ -782,8 +799,9 @@ def annual_lines_of_code_count_ranked_by_new_repo_in_repo_group(repo_group_id, r ORDER BY net desc LIMIT 10 """) - results = pd.read_sql(cdRgNewrepRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, - "repo_id": repo_id, "calendar_year": calendar_year}) + + results = pd.read_sql(cdRgNewrepRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, + "repo_id": repo_id, "calendar_year": calendar_year}) return results @register_metric() @@ -877,9 +895,10 @@ def annual_lines_of_code_count_ranked_by_repo_in_repo_group(repo_group_id, repo_ LIMIT 10 """) + - results = pd.read_sql(cdRgTpRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, - "repo_id": repo_id}) + results = pd.read_sql(cdRgTpRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, + "repo_id": repo_id}) return results @register_metric() @@ -931,7 +950,8 @@ def lines_of_code_commit_counts_by_calendar_year_grouped(repo_url, calendar_year GROUP BY week """) - results = pd.read_sql(cdRepTpIntervalLocCommitsSQL, engine, params={"repourl": '%{}%'.format(repo_url), 'calendar_year': calendar_year}) + + results = pd.read_sql(cdRepTpIntervalLocCommitsSQL, engine, params={"repourl": '%{}%'.format(repo_url), 'calendar_year': calendar_year}) return results @register_metric() @@ -951,8 +971,9 @@ def average_weekly_commits(repo_group_id=None, repo_id=None, calendar_year=None) ORDER BY repo_name """.format(extra_and)) + results = pd.read_sql(average_weekly_commits_sql, engine, params={"repo_group_id": repo_group_id, - "repo_id": repo_id, "calendar_year": calendar_year}) + "repo_id": repo_id, "calendar_year": calendar_year}) return results @register_metric() @@ -1034,6 +1055,7 @@ def aggregate_summary(repo_group_id, repo_id=None, begin_date=None, end_date=Non ) temp ) commit_data """) + results = pd.read_sql(summarySQL, engine, params={'repo_group_id': repo_group_id, 'begin_date': begin_date, 'end_date': end_date}) return results @@ -1102,6 +1124,7 @@ def aggregate_summary(repo_group_id, repo_id=None, begin_date=None, end_date=Non ) temp ) commit_data """) + results = pd.read_sql(summarySQL, engine, params={'repo_id': repo_id, 'begin_date': begin_date, 'end_date': end_date}) return results diff --git a/augur/api/metrics/toss.py b/augur/api/metrics/toss.py index 94ae9f1f3a..71af07cd5a 100644 --- a/augur/api/metrics/toss.py +++ b/augur/api/metrics/toss.py @@ -4,8 +4,8 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine +engine = DatabaseEngine(connection_pool_size=1).engine @register_metric(type="toss") def toss_pull_request_acceptance_rate(repo_id, begin_date=None, end_date=None, group_by='week'): @@ -57,6 +57,7 @@ def toss_pull_request_acceptance_rate(repo_id, begin_date=None, end_date=None, g repo_id ) opened ON merged.repo_id = opened.repo_id """) + results = pd.read_sql(pr_acceptance_rate_sql, engine, params={'repo_id': repo_id, 'group_by': 
group_by, 'begin_date': begin_date, 'end_date': end_date}) return results @@ -88,8 +89,9 @@ def toss_review_duration(repo_id, begin_date=None, end_date=None): AND pr_created_at BETWEEN :begin_date AND :end_date """) + results = pd.read_sql(pr_acceptance_rate_sql, engine, params={'repo_id': repo_id, - 'begin_date': begin_date, 'end_date': end_date}) + 'begin_date': begin_date, 'end_date': end_date}) if results.iloc[0]['duration'] is None: results.iloc[0]['duration'] = -1 else: @@ -118,5 +120,6 @@ def toss_repo_info(repo_id): repo_info.data_collection_date DESC LIMIT 1; """) + results = pd.read_sql(license_file_sql, engine, params={'repo_id': repo_id}) return results diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 66bc7309b2..917b419f33 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -29,8 +29,8 @@ logger = logging.getLogger(__name__) development = get_development_flag() -from augur.application.db.engine import create_database_engine -Session = sessionmaker(bind=create_database_engine()) +from augur.application.db.engine import DatabaseEngine +Session = sessionmaker(bind=DatabaseEngine().engine) from augur.api.routes import AUGUR_API_VERSION @@ -78,14 +78,17 @@ def validate_user(): if not development and not request.is_secure: return generate_upgrade_request() - session = Session() + username = request.args.get("username") password = request.args.get("password") if username is None or password is None: # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400 return jsonify({"status": "Missing argument"}), 400 + session = Session() user = session.query(User).filter(User.login_name == username).first() + session.close() + if user is None: return jsonify({"status": "Invalid username"}) @@ -243,15 +246,18 @@ def update_user(): if email is not None: existing_user = session.query(User).filter(User.email == email).one() if existing_user is not None: + session = Session() return jsonify({"status": "Already an account with this email"}) current_user.email = email session.commit() + session = Session() return jsonify({"status": "Email Updated"}) if new_password is not None: current_user.login_hashword = generate_password_hash(new_password) session.commit() + session = Session() return jsonify({"status": "Password Updated"}) if new_login_name is not None: @@ -261,6 +267,7 @@ def update_user(): current_user.login_name = new_login_name session.commit() + session = Session() return jsonify({"status": "Username Updated"}) return jsonify({"status": "Missing argument"}), 400 diff --git a/augur/application/cli/__init__.py b/augur/application/cli/__init__.py index 67e64003ce..1f9fe1ef09 100644 --- a/augur/application/cli/__init__.py +++ b/augur/application/cli/__init__.py @@ -7,7 +7,7 @@ import re import json -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from sqlalchemy.exc import OperationalError @@ -28,7 +28,7 @@ def new_func(ctx, *args, **kwargs): def test_db_connection(function_db_connection): @click.pass_context def new_func(ctx, *args, **kwargs): - engine = create_database_engine() + engine = DatabaseEngine().engine usage = re.search(r"Usage:\s(.*)\s\[OPTIONS\]", str(ctx.get_usage())).groups()[0] try: engine.connect() diff --git a/augur/application/cli/db.py b/augur/application/cli/db.py index ebf3603c43..8506f890ce 100644 --- a/augur/application/cli/db.py +++ b/augur/application/cli/db.py @@ -21,7 +21,7 @@ from augur.application.db.session import DatabaseSession from 
augur.application.logs import AugurLogger -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine logger = logging.getLogger(__name__) @@ -75,8 +75,7 @@ def get_repo_groups(): List all repo groups and their associated IDs """ - engine = create_database_engine() - with engine.connect() as connection: + with DatabaseEngine as engine, engine.connect() as connection: df = pd.read_sql( s.sql.text( "SELECT repo_group_id, rg_name, rg_description FROM augur_data.repo_groups" @@ -97,8 +96,7 @@ def add_repo_groups(filename): """ Create new repo groups in Augur's database """ - engine = create_database_engine() - with engine.connect() as connection: + with DatabaseEngine as engine, engine.connect() as connection: df = pd.read_sql( s.sql.text("SELECT repo_group_id FROM augur_data.repo_groups"), @@ -163,8 +161,7 @@ def get_db_version(): """ ) - engine = create_database_engine() - with engine.connect() as connection: + with DatabaseEngine as engine, engine.connect() as connection: result = int(connection.execute(db_version_sql).fetchone()[2]) @@ -248,8 +245,7 @@ def update_api_key(api_key): """ ) - engine = create_database_engine() - with engine.connect() as connection: + with DatabaseEngine as engine, engine.connect() as connection: connection.execute(update_api_key_sql, api_key=api_key) logger.info(f"Updated Augur API key to: {api_key}") @@ -268,8 +264,7 @@ def get_api_key(): ) try: - engine = create_database_engine() - with engine.connect() as connection: + with DatabaseEngine as engine, engine.connect() as connection: print(connection.execute(get_api_key_sql).fetchone()[0]) except TypeError: print("No Augur API key found.") diff --git a/augur/application/cli/user.py b/augur/application/cli/user.py index d3e014ec29..e2846d5f37 100644 --- a/augur/application/cli/user.py +++ b/augur/application/cli/user.py @@ -10,11 +10,11 @@ import logging from werkzeug.security import generate_password_hash from augur.application.db.models import User -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from sqlalchemy.orm import sessionmaker -engine = create_database_engine() +engine = DatabaseEngine().engine Session = sessionmaker(bind=engine) logger = logging.getLogger(__name__) diff --git a/augur/application/db/engine.py b/augur/application/db/engine.py index a7b31ea984..e734224348 100644 --- a/augur/application/db/engine.py +++ b/augur/application/db/engine.py @@ -5,6 +5,8 @@ import logging import inspect from sqlalchemy import create_engine, event +from sqlalchemy.engine.base import Engine +from sqlalchemy.pool import NullPool from augur.application.logs import initialize_stream_handler from augur.application.db.util import catch_operational_error @@ -47,36 +49,64 @@ def get_database_string() -> str: return db_conn_string +class DatabaseEngine(): -def create_database_engine(): - """Create sqlalchemy database engine + def __init__(self, connection_pool_size=5): - Note: - A new database engine is created each time the function is called + self._engine = self.create_database_engine(connection_pool_size) - Returns: - sqlalchemy database engine - """ - # curframe = inspect.currentframe() - # calframe = inspect.getouterframes(curframe, 2) - # print('file name:', calframe[1][1]) - # print('function name:', calframe[1][3]) + def __enter__(self): + return self._engine + + + def __exit__(self, exception_type, exception_value, exception_traceback): + + self._engine.dispose() + + def 
dispose(self): + self._engine.dispose() + + @property + def engine(self): + return self._engine + + + def create_database_engine(self, connection_pool_size): + """Create sqlalchemy database engine + + Note: + A new database engine is created each time the function is called + + Returns: + sqlalchemy database engine + """ + + # curframe = inspect.currentframe() + # calframe = inspect.getouterframes(curframe, 2) + # print('file name:', calframe[1][1]) + # print('function name:', calframe[1][3]) + + db_conn_string = get_database_string() - db_conn_string = get_database_string() + if connection_pool_size == 1: + engine = create_engine(db_conn_string, poolclass=NullPool) - engine = create_engine(db_conn_string) + elif connection_pool_size < 0: + raise Exception(f"Invalid Pool Size: {connection_pool_size}") + else: + engine = create_engine(db_conn_string, pool_size=connection_pool_size) - @event.listens_for(engine, "connect", insert=True) - def set_search_path(dbapi_connection, connection_record): - existing_autocommit = dbapi_connection.autocommit - dbapi_connection.autocommit = True - cursor = dbapi_connection.cursor() - cursor.execute("SET SESSION search_path=public,augur_data,augur_operations,spdx") - cursor.close() - dbapi_connection.autocommit = existing_autocommit + @event.listens_for(engine, "connect", insert=True) + def set_search_path(dbapi_connection, connection_record): + existing_autocommit = dbapi_connection.autocommit + dbapi_connection.autocommit = True + cursor = dbapi_connection.cursor() + cursor.execute("SET SESSION search_path=public,augur_data,augur_operations,spdx") + cursor.close() + dbapi_connection.autocommit = existing_autocommit - return engine + return engine class EngineConnection(): diff --git a/augur/application/db/session.py b/augur/application/db/session.py index 5330113ad5..1d4901b263 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -60,11 +60,11 @@ def __init__(self, logger, engine=None): self.engine_created = False if self.engine is None: - from augur.application.db.engine import create_database_engine + from augur.application.db.engine import DatabaseEngine self.engine_created = True - self.engine = create_database_engine() + self.engine = DatabaseEngine().engine super().__init__(self.engine) @@ -77,6 +77,9 @@ def __exit__(self, exception_type, exception_value, exception_traceback): self.engine.dispose() self.close() + + def __del__(self): + self.close() def execute_sql(self, sql_text): diff --git a/augur/application/logs.py b/augur/application/logs.py index b8b23da95c..2c976c2af7 100644 --- a/augur/application/logs.py +++ b/augur/application/logs.py @@ -76,13 +76,13 @@ def initialize_stream_handler(logger, log_level): def get_log_config(): - from augur.application.db.engine import create_database_engine + from augur.application.db.engine import DatabaseEngine # we are using this session instead of the # DatabaseSession class because the DatabaseSession # class requires a logger, and we are setting up logger thigns here - engine = create_database_engine() - session = Session(engine) + with DatabaseEngine() as engine: + session = Session(engine) query = session.query(Config).filter_by(section_name="Logging") section_data = execute_session_query(query, 'all') diff --git a/augur/application/schema/alembic/env.py b/augur/application/schema/alembic/env.py index 98531c4083..d170ef243f 100644 --- a/augur/application/schema/alembic/env.py +++ b/augur/application/schema/alembic/env.py @@ -5,8 +5,7 @@ from alembic import context 
from augur.application.db.models.base import Base -from augur.application.db.engine import create_database_engine -engine = create_database_engine() +from augur.application.db.engine import DatabaseEngine # this is the Alembic Config object, which provides # access to the values within the .ini file in use. @@ -60,9 +59,8 @@ def run_migrations_online(): and associate a connection with the context. """ - connectable = engine - with connectable.connect() as connection: + with DatabaseEngine() as connectable, connectable.connect() as connection: context.configure( connection=connection, target_metadata=target_metadata, diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index deb7a21c78..36c4e5f08f 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -22,7 +22,7 @@ from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, RepoClusterMessage, RepoTopic, TopicWord -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query @@ -109,7 +109,8 @@ def clustering_model(repo_git: str) -> None: """ ) # result = db.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, create_database_engine(), params={"repo_id": repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, engine, params={"repo_id": repo_id}) logger.info(msg_df_cur_repo.head()) logger.debug(f"Repo message df size: {len(msg_df_cur_repo.index)}") @@ -298,7 +299,8 @@ def visualize_labels_PCA(features, labels, annotations, num_components, title): AND prmr.msg_id=m.msg_id """ ) - msg_df_all = pd.read_sql(get_messages_sql, create_database_engine(), params={}) + with DatabaseEngine(connection_pool_size=1) as engine: + msg_df_all = pd.read_sql(get_messages_sql, engine, params={}) # select only highly active repos logger.debug("Selecting highly active repos") diff --git a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py index 780ccd1a2d..907356da79 100644 --- a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py +++ b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py @@ -7,7 +7,7 @@ from augur.application.db.session import DatabaseSession from augur.tasks.github.util.github_paginator import GithubPaginator from augur.application.db.models import ContributorRepo -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine ### This worker scans all the platform users in Augur, and pulls their platform activity ### logs. 
Those are then used to analyze what repos each is working in (which will include repos not @@ -43,7 +43,8 @@ def contributor_breadth_model() -> None: WHERE gh_login IS NOT NULL """) - current_cntrb_logins = json.loads(pd.read_sql(cntrb_login_query, create_database_engine(), params={}).to_json(orient="records")) + with DatabaseEngine(connection_pool_size=1) as engine: + current_cntrb_logins = json.loads(pd.read_sql(cntrb_login_query, engine, params={}).to_json(orient="records")) ## We need a list of all contributors so we can iterate through them to gather events ## We need a list of event ids to avoid insertion of duplicate events. We ignore the event @@ -84,7 +85,8 @@ def contributor_breadth_model() -> None: WHERE 1 = 1 """) - current_event_ids = json.loads(pd.read_sql(dup_query, create_database_engine(), params={}).to_json(orient="records")) + with DatabaseEngine(connection_pool_size=1) as engine: + current_event_ids = json.loads(pd.read_sql(dup_query, engine, params={}).to_json(orient="records")) #Convert list of dictionaries to regular list of 'event_ids'. #The only values that the sql query returns are event_ids so diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index 8328e31c13..1a3acb0fc1 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -9,7 +9,7 @@ from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, DiscourseInsight -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query #import os, sys, time, requests, json @@ -64,7 +64,8 @@ def discourse_analysis_model(repo_git: str) -> None: """) # result = db.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, create_database_engine(), params={"repo_id": repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, engine, params={"repo_id": repo_id}) msg_df_cur_repo = msg_df_cur_repo.sort_values(by=['thread_id']).reset_index(drop=True) logger.info(msg_df_cur_repo.head()) diff --git a/augur/tasks/data_analysis/insight_worker/tasks.py b/augur/tasks/data_analysis/insight_worker/tasks.py index f97f11271d..60990f9701 100644 --- a/augur/tasks/data_analysis/insight_worker/tasks.py +++ b/augur/tasks/data_analysis/insight_worker/tasks.py @@ -16,12 +16,11 @@ from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, ChaossMetricStatus, RepoInsight, RepoInsightsRecord -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query warnings.filterwarnings('ignore') -engine = create_database_engine() @celery.task def insight_model(repo_git: str) -> None: @@ -96,46 +95,46 @@ def insight_model(repo_git: str) -> None: return """ Deletion of old insights """ - - # Delete previous insights not in the anomaly_days param - min_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) - logger.info("MIN DATE: {}\n".format(min_date)) - logger.info("Deleting out of date records ...\n") - delete_record_SQL = 
s.sql.text(""" - DELETE - FROM - repo_insights_records - WHERE - repo_id = :repo_id - AND ri_date < :min_date - """) - result = engine.execute(delete_record_SQL, repo_id=repo_id, min_date=min_date) - - logger.info("Deleting out of date data points ...\n") - delete_points_SQL = s.sql.text(""" - DELETE - FROM - repo_insights - USING ( - SELECT ri_metric, ri_field - FROM ( - SELECT * - FROM repo_insights - WHERE ri_fresh = TRUE - AND repo_id = :repo_id + with DatabaseEngine(connection_pool_size=1) as engine: + # Delete previous insights not in the anomaly_days param + min_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) + logger.info("MIN DATE: {}\n".format(min_date)) + logger.info("Deleting out of date records ...\n") + delete_record_SQL = s.sql.text(""" + DELETE + FROM + repo_insights_records + WHERE + repo_id = :repo_id AND ri_date < :min_date - ) old_insights - ) to_delete - WHERE repo_insights.ri_metric = to_delete.ri_metric - AND repo_insights.ri_field = to_delete.ri_field - """) - result = engine.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - - # get table values to check for dupes later on + """) + result = engine.execute(delete_record_SQL, repo_id=repo_id, min_date=min_date) - - table_values_sql = s.sql.text("""SELECT * FROM repo_insights_records WHERE repo_id={}""".format(repo_id)) - insight_table_values = pd.read_sql(table_values_sql, engine, params={}) + logger.info("Deleting out of date data points ...\n") + delete_points_SQL = s.sql.text(""" + DELETE + FROM + repo_insights + USING ( + SELECT ri_metric, ri_field + FROM ( + SELECT * + FROM repo_insights + WHERE ri_fresh = TRUE + AND repo_id = :repo_id + AND ri_date < :min_date + ) old_insights + ) to_delete + WHERE repo_insights.ri_metric = to_delete.ri_metric + AND repo_insights.ri_field = to_delete.ri_field + """) + result = engine.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) + + # get table values to check for dupes later on + + + table_values_sql = s.sql.text("""SELECT * FROM repo_insights_records WHERE repo_id={}""".format(repo_id)) + insight_table_values = pd.read_sql(table_values_sql, engine, params={}) to_model_columns = df.columns[0:len(metrics) + 1] @@ -321,8 +320,9 @@ def confidence_interval_insights(logger): # endpointSQL = s.sql.text(""" # SELECT * FROM chaoss_metric_status WHERE cm_source = 'augur_db' # """) - # for endpoint in pd.read_sql(endpointSQL, create_database_engine(), params={}).to_records(): - # endpoints.append(endpoint) + #with DatabaseEngine(connection_pool_size=1) as engine: + # for endpoint in pd.read_sql(endpointSQL,engine, params={}).to_records(): + # endpoints.append(endpoint) """""" @@ -523,7 +523,8 @@ def send_insight(insight, units_from_mean, logger): WHERE repo_id = {} """.format(insight['repo_id'])) - repo = pd.read_sql(repoSQL, create_database_engine(), params={}).iloc[0] + with DatabaseEngine(connection_pool_size=1) as engine: + repo = pd.read_sql(repoSQL, engine, params={}).iloc[0] begin_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) dict_date = insight['ri_date'].strftime("%Y-%m-%d %H:%M:%S") @@ -561,11 +562,9 @@ def clear_insights(repo_id, new_endpoint, new_field, logger): AND ri_field = '{}' """.format(repo_id, new_endpoint, new_field) try: - engine = create_database_engine() - result = engine.execute(deleteSQL) - engine.dispose() + with DatabaseEngine(connection_pool_size=1) as engine: + result = engine.execute(deleteSQL) except Exception as e: - engine.dispose() logger.info("Error occured deleting insight 
slot: {}".format(e)) # Delete all insights @@ -581,11 +580,9 @@ def clear_insights(repo_id, new_endpoint, new_field, logger): AND ri_field = '{}' """.format(repo_id, new_endpoint, new_field) try: - engine = create_database_engine() - result = engine.execute(deleteSQL) - engine.dispose() + with DatabaseEngine(connection_pool_size=1) as engine: + result = engine.execute(deleteSQL) except Exception as e: - engine.dispose() logger.info("Error occured deleting insight slot: {}".format(e)) def clear_insight(repo_id, new_score, new_metric, new_field, logger): @@ -604,7 +601,8 @@ def clear_insight(repo_id, new_score, new_metric, new_field, logger): AND ri_field = '{}' ORDER BY ri_score DESC """.format(repo_id, new_metric, new_field)) - rec = json.loads(pd.read_sql(recordSQL, create_database_engine(), params={}).to_json(orient='records')) + with DatabaseEngine(connection_pool_size=1) as engine: + rec = json.loads(pd.read_sql(recordSQL, engine, params={}).to_json(orient='records')) logger.info("recordsql: {}, \n{}".format(recordSQL, rec)) # If new score is higher, continue with deletion if len(rec) > 0: @@ -625,11 +623,9 @@ def clear_insight(repo_id, new_score, new_metric, new_field, logger): AND ri_field = '{}' """.format(record['repo_id'], record['ri_metric'], record['ri_field']) try: - engine = create_database_engine() - result = engineexecute(deleteSQL) - engine.dispose() + with DatabaseEngine(connection_pool_size=1) as engine: + result = engine.execute(deleteSQL) except Exception as e: - engine.dispose() logger.info("Error occured deleting insight slot: {}".format(e)) else: insertion_directions['record'] = True @@ -642,7 +638,8 @@ def clear_insight(repo_id, new_score, new_metric, new_field, logger): WHERE repo_id = {} ORDER BY ri_score ASC """.format(repo_id)) - ins = json.loads(pd.read_sql(insightSQL, create_database_engine(), params={}).to_json(orient='records')) + with DatabaseEngine(connection_pool_size=1) as engine: + ins = json.loads(pd.read_sql(insightSQL, engine, params={}).to_json(orient='records')) logger.info("This repos insights: {}".format(ins)) # Determine if inisghts need to be deleted based on if there are more insights than we want stored, @@ -680,11 +677,9 @@ def clear_insight(repo_id, new_score, new_metric, new_field, logger): AND ri_metric = '{}' """.format(insight['repo_id'], insight['ri_metric']) try: - engine = create_database_engine() - result = engine.execute(deleteSQL) - engine.dispose() + with DatabaseEngine(connection_pool_size=1) as engine: + result = engine.execute(deleteSQL) except Exception as e: - engine.dispose() logger.info("Error occured deleting insight slot: {}".format(e)) return insertion_directions @@ -752,7 +747,8 @@ def filter_duplicates(cols, tables, og_data, logger): colSQL = s.sql.text(""" SELECT {} FROM {} """.format(col, table_str)) - values = pd.read_sql(colSQL, create_database_engine(), params={}) + with DatabaseEngine(connection_pool_size=1) as engine: + values = pd.read_sql(colSQL, engine, params={}) for obj in og_data: if values.isin([obj[cols[col]]]).any().any(): diff --git a/augur/tasks/data_analysis/message_insights/tasks.py b/augur/tasks/data_analysis/message_insights/tasks.py index 0b5e470b42..cfb7b90ce4 100644 --- a/augur/tasks/data_analysis/message_insights/tasks.py +++ b/augur/tasks/data_analysis/message_insights/tasks.py @@ -14,7 +14,7 @@ from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, MessageAnalysis, 
MessageAnalysisSummary -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query #SPDX-License-Identifier: MIT @@ -50,7 +50,8 @@ def message_insight_model(repo_git: str) -> None: repo_exists_SQL = s.sql.text(""" SELECT exists (SELECT 1 FROM augur_data.message_analysis_summary WHERE repo_id = :repo_id LIMIT 1)""") - df_rep = pd.read_sql_query(repo_exists_SQL, create_database_engine(), params={'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_rep = pd.read_sql_query(repo_exists_SQL, engine, params={'repo_id': repo_id}) #full_train = not(df_rep['exists'].iloc[0]) logger.info(f'Full Train: {full_train}') @@ -75,7 +76,9 @@ def message_insight_model(repo_git: str) -> None: where message.repo_id = :repo_id """) - df_past = pd.read_sql_query(past_SQL, create_database_engine(), params={'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_past = pd.read_sql_query(past_SQL, engine, params={'repo_id': repo_id}) + df_past['msg_timestamp'] = pd.to_datetime(df_past['msg_timestamp']) df_past = df_past.sort_values(by='msg_timestamp') logger.debug(f'{df_past} is df_past') @@ -114,7 +117,8 @@ def message_insight_model(repo_git: str) -> None: left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where message.repo_id = :repo_id""") - df_message = pd.read_sql_query(join_SQL, create_database_engine(), params={'repo_id': repo_id, 'begin_date': begin_date}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_message = pd.read_sql_query(join_SQL, engine, params={'repo_id': repo_id, 'begin_date': begin_date}) logger.info(f'Messages dataframe dim: {df_message.shape}') logger.info(f'Value 1: {df_message.shape[0]}') @@ -149,7 +153,8 @@ def message_insight_model(repo_git: str) -> None: left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where issue_message_ref.repo_id = :repo_id""") - df_past = pd.read_sql_query(merge_SQL, create_database_engine(), params={'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_past = pd.read_sql_query(merge_SQL, engine, params={'repo_id': repo_id}) df_past = df_past.loc[df_past['novelty_flag'] == 0] rec_errors = df_past['reconstruction_error'].tolist() threshold = threshold_otsu(np.array(rec_errors)) @@ -337,7 +342,8 @@ def message_insight_model(repo_git: str) -> None: FROM message_analysis_summary WHERE repo_id=:repo_id""") - df_past = pd.read_sql_query(message_analysis_query, create_database_engine(), params={'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_past = pd.read_sql_query(message_analysis_query, engine, params={'repo_id': repo_id}) # df_past = get_table_values(cols=['period', 'positive_ratio', 'negative_ratio', 'novel_count'], # tables=['message_analysis_summary'], @@ -406,7 +412,9 @@ def send_insight(repo_id, insights, logger): WHERE repo_id = {} """.format(repo_id)) - repo = pd.read_sql(repoSQL, create_database_engine(), params={}).iloc[0] + with DatabaseEngine(connection_pool_size=1) as engine: + repo = pd.read_sql(repoSQL, engine, params={}).iloc[0] + to_send = { 'message_insight': True, 'repo_git': repo['repo_git'], @@ -440,8 +448,8 @@ def get_max_id(table, column, logger, default=25150): SELECT max({0}.{1}) AS {1} FROM {0} """.format(table, column)) - db = create_database_engine() - rs = pd.read_sql(max_id_sql, db, params={}) + with 
DatabaseEngine(connection_pool_size=1) as engine: + rs = pd.read_sql(max_id_sql, engine, params={}) if rs.iloc[0][column] is not None: max_id = int(rs.iloc[0][column]) + 1 logger.info("Found max id for {} column in the {} table: {}\n".format(column, table, max_id)) @@ -450,6 +458,4 @@ def get_max_id(table, column, logger, default=25150): logger.warning("Could not find max id for {} column in the {} table... " + "using default set to: {}\n".format(column, table, max_id)) - db.dispose() - return max_id diff --git a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py index 6ebd41acc0..9a78f896c0 100644 --- a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py +++ b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py @@ -11,7 +11,7 @@ from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, PullRequestAnalysis -from augur.application.db.engine import create_database_engine +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query # from sklearn.metrics import (confusion_matrix, f1_score, precision_score, recall_score) @@ -62,7 +62,8 @@ def pull_request_analysis_model(repo_git: str) -> None: and pr_src_state like 'open' """) - df_pr = pd.read_sql_query(pr_SQL, create_database_engine(), params={'begin_date': begin_date, 'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_pr = pd.read_sql_query(pr_SQL, engine, params={'begin_date': begin_date, 'repo_id': repo_id}) logger.info(f'PR Dataframe dim: {df_pr.shape}\n') @@ -94,13 +95,16 @@ def pull_request_analysis_model(repo_git: str) -> None: left outer join augur_data.issue_message_ref on message.msg_id = issue_message_ref.msg_id left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where issue_message_ref.repo_id = :repo_id""") - df_message = pd.read_sql_query(messages_SQL, create_database_engine(), params={'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + df_message = pd.read_sql_query(messages_SQL, engine, params={'repo_id': repo_id}) logger.info(f'Mapping messages to PR, find comment & participants counts') # Map PR to its corresponding messages - pr_ref_sql = s.sql.text("select * from augur_data.pull_request_message_ref") - df_pr_ref = pd.read_sql_query(pr_ref_sql, create_database_engine()) + + with DatabaseEngine(connection_pool_size=1) as engine: + pr_ref_sql = s.sql.text("select * from augur_data.pull_request_message_ref") + df_pr_ref = pd.read_sql_query(pr_ref_sql, engine) df_merge = pd.merge(df_pr, df_pr_ref, on='pull_request_id', how='left') df_merge = pd.merge(df_merge, df_message, on='msg_id', how='left') df_merge = df_merge.dropna(subset=['msg_id'], axis=0) @@ -149,11 +153,12 @@ def pull_request_analysis_model(repo_git: str) -> None: logger.info(f'Fetching repo statistics') # Get repo info - repo_sql = s.sql.text(""" - SELECT repo_id, pull_requests_merged, pull_request_count,watchers_count, last_updated FROM - augur_data.repo_info where repo_id = :repo_id - """) - df_repo = pd.read_sql_query(repo_sql, create_database_engine(), params={'repo_id': repo_id}) + with DatabaseEngine(connection_pool_size=1) as engine: + repo_sql = s.sql.text(""" + SELECT repo_id, pull_requests_merged, pull_request_count,watchers_count, last_updated FROM + augur_data.repo_info where repo_id = :repo_id + """) + 
df_repo = pd.read_sql_query(repo_sql, engine, params={'repo_id': repo_id}) df_repo = df_repo.loc[df_repo.groupby('repo_id').last_updated.idxmax(), :] df_repo = df_repo.drop(['last_updated'], axis=1) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index d02a3cf19f..cb6ed03727 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -193,6 +193,9 @@ def update_analysis_log(repos_id,status): #encode the repo_id with the commit. commits_with_repo_tuple = [(commit,repo_id) for commit in list(missing_commits)] + + #1/21/2023: SPG things list needs to be initialized based on error + all_missing_commits = [] #Get all missing commits into one large list to split into task pools all_missing_commits.extend(commits_with_repo_tuple) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 4267264ebf..9f3adbc446 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -22,8 +22,10 @@ def get_release_inf(session, repo_id, release, tag_only): name = "N/A" company = "N/A" else: - name = "" if release['author']['name'] is None else release['author']['name'] - company = "" if release['author']['company'] is None else release['author']['company'] + author = release["author"] + + name = author.get("name") or "" + company = author.get("company") or "" author = name + '_' + company @@ -44,19 +46,15 @@ def get_release_inf(session, repo_id, release, tag_only): } else: if 'tagger' in release['target']: - if 'name' in release['target']['tagger']: - name = release['target']['tagger']['name'] - else: - name = "" - if 'email' in release['target']['tagger'] and release['target']['tagger']['email']: - email = '_' + release['target']['tagger']['email'] - else: - email = "" - author = name + email - if 'date' in release['target']['tagger']: - date = release['target']['tagger']['date'] - else: - date = "" + + tagger = release["target"]["tagger"] + + date = tagger.get("date") or "" + name = tagger.get("name") or "" + email = tagger.get("email") or "" + + author = name + "_" + email + else: author = "" date = "" diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 8df5bea1e1..d1bec5c68a 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -140,9 +140,9 @@ def init_worker(**kwargs): global engine - from augur.application.db.engine import create_database_engine + from augur.application.db.engine import DatabaseEngine - engine = create_database_engine() + engine = DatabaseEngine().engine @worker_process_shutdown.connect diff --git a/augur/util/repo_load_controller.py b/augur/util/repo_load_controller.py index 9c0317cee6..60f9f46d7c 100644 --- a/augur/util/repo_load_controller.py +++ b/augur/util/repo_load_controller.py @@ -7,11 +7,11 @@ from typing import List, Any, Dict -from augur.application.db.engine import create_database_engine from augur.tasks.github.util.github_paginator import hit_api from augur.tasks.github.util.github_paginator import GithubPaginator from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.application.db.models import Repo, UserRepo, RepoGroup, UserGroup, User from augur.application.db.util import execute_session_query @@ -562,7 +562,10 @@ def paginate_repos(self, source, page=0, page_size=25, sort="repo_id", direction get_page_of_repos_sql = s.sql.text(query[0]) - results 
= pd.read_sql(get_page_of_repos_sql, create_database_engine()) + with DatabaseEngine(connection_pool_size=1) as engine: + + results = pd.read_sql(get_page_of_repos_sql, engine) + results['url'] = results['url'].apply(lambda datum: datum.split('//')[1]) b64_urls = [] diff --git a/tests/test_applicaton/test_db/test_session.py b/tests/test_applicaton/test_db/test_session.py index 2e55d71653..83c95b85cd 100644 --- a/tests/test_applicaton/test_db/test_session.py +++ b/tests/test_applicaton/test_db/test_session.py @@ -10,8 +10,7 @@ not_provided_cntrb_id = '00000000-0000-0000-0000-000000000000' nan_cntrb_id = '01000000-0000-0000-0000-000000000000' - - +# TODO: Add test that does not pass an engine to the Session def test_execute_sql(test_db_engine): @@ -247,3 +246,28 @@ def test_insert_issue_data_with_invalid_strings(test_db_engine): DELETE FROM "augur_data"."repo"; DELETE FROM "augur_data"."repo_groups"; """) + + +def test_session_without_passing_engine(): + + # with DatabaseSession(logger) as session: + + session = DatabaseSession(logger) + + assert session is not None + assert session.engine is not None + + with session.engine.connect() as connection: + + # insert the cntrb_id and cntrb_login into the contributors table so the contributor is present. + # This is so we don't get a foreign key error on the cntrb_id when we insert the prs + query = s.sql.text("""SELECT * FROM repo""") + + result = connection.execute(query) + data = result.fetchall() + + assert result is not None + assert data is not None + assert isinstance(data, list) + + diff --git a/tests/test_routes/test_api_functionality/test_commit_routes_api.py b/tests/test_routes/test_api_functionality/test_commit_routes_api.py index 8ec7e6d4d9..1ec9ce69f5 100644 --- a/tests/test_routes/test_api_functionality/test_commit_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_commit_routes_api.py @@ -6,45 +6,54 @@ def test_annual_commit_count_ranked_by_new_repo_in_repo_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-new-repo-in-repo-group/') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-commit-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_commit_count_ranked_by_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-commit-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_commit_count_ranked_by_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def 
test_top_committers_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/top-committers') + assert response is not None data = response.json() assert response.status_code == 200 def test_top_committers_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/top-committers') + assert response is not None data = response.json() assert response.status_code == 200 def test_committer_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/committers') + assert response is not None data = response.json() assert response.status_code == 200 def test_committer_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/committers?period=year') + assert response is not None data = response.json() assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_contributor_routes_api.py b/tests/test_routes/test_api_functionality/test_contributor_routes_api.py index 0198563cce..6a01f9bdfd 100644 --- a/tests/test_routes/test_api_functionality/test_contributor_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_contributor_routes_api.py @@ -6,21 +6,25 @@ def test_contributors_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/contributors') + assert response is not None data = response.json() assert response.status_code == 200 def test_contributors_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/contributors') + assert response is not None data = response.json() assert response.status_code == 200 def test_contributors_new_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/contributors-new') + assert response is not None data = response.json() assert response.status_code == 200 def test_contributors_new_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/contributors-new') + assert response is not None data = response.json() assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_issue_routes_api.py b/tests/test_routes/test_api_functionality/test_issue_routes_api.py index 5a2fcf14fe..280e12d386 100644 --- a/tests/test_routes/test_api_functionality/test_issue_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_issue_routes_api.py @@ -7,170 +7,204 @@ def test_issues_new_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-new') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_new_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-new') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_active_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-active') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_active_by_repo_api_is_functional(): response = 
requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-active') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_closed_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-closed') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_closed_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-closed') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_duration_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-duration') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_duration_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-duration') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_participants_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-participants') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_participants_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-participants') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_throughput_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-throughput') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_throughput_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-throughput') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_backlog_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-backlog') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_backlog_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-backlog') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_first_time_opened_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-first-time-opened') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_first_time_opened_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-first-time-opened') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_first_time_closed_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/20/issues-first-time-closed') + assert response is not None data = response.json() assert response.status_code == 200 def 
test_issues_first_time_closed_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/20/repos/25430/issues-first-time-closed') + assert response is not None data = response.json() assert response.status_code == 200 def test_open_issues_count_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/open-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 def test_open_issues_count_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/open-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 def test_closed_issues_count_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/closed-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 def test_closed_issues_count_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/closed-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_open_age_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-open-age/') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_open_age_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-open-age/') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_closed_resolution_duration_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-closed-resolution-duration/') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_closed_resolution_duration_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-closed-resolution-duration/') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_maintainer_response_duration_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-maintainer-response-duration/') + assert response is not None data = response.json() assert response.status_code == 200 def test_issues_maintainer_response_duration_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-maintainer-response-duration/') + assert response is not None data = response.json() assert response.status_code == 200 def test_average_issue_resolution_time_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/average-issue-resolution-time') + assert response is not None data = response.json() assert response.status_code == 200 def test_average_issue_resolution_time_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/average-issue-resolution-time') + assert response is not None data = response.json() assert response.status_code == 200 def 
test_issue_comments_mean_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-comments-mean') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_comments_mean_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-comments-mean') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_comments_mean_std_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-comments-mean-std') + assert response is not None data = response.json() assert response.status_code == 200 def test_issue_comments_mean_std_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-comments-mean-std') + assert response is not None data = response.json() assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py b/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py index a327299a74..fef8e3498c 100644 --- a/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py @@ -7,16 +7,19 @@ def test_pull_requests_merge_contributor_new_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/pull-requests-merge-contributor-new') + assert response is not None data = response.json() assert response.status_code == 200 def test_pull_requests_merge_contributor_new_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/pull-requests-merge-contributor-new') + assert response is not None data = response.json() assert response.status_code == 200 def test_pull_requests_closed_no_merge_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repos/25430/pull-requests-closed-no-merge') + assert response is not None data = response.json() assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py b/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py index a4c1526206..9d8303109a 100644 --- a/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py @@ -7,36 +7,43 @@ def test_code_changes_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/code-changes') + assert response is not None data = response.json() assert response.status_code == 200 def test_code_changes_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/code-changes') + assert response is not None data = response.json() assert response.status_code == 200 def test_code_changes_lines_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/code-changes-lines') + assert response is not None data = response.json() assert response.status_code == 200 def test_code_changes_lines_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/code-changes-lines') + 
assert response is not None data = response.json() assert response.status_code == 200 def test_sub_projects_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/sub-projects') + assert response is not None data = response.json() assert response.status_code == 200 def test_sub_projects_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/sub-projects') + assert response is not None data = response.json() assert response.status_code == 200 def test_cii_best_practices_badge_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/cii-best-practices-badge') + assert response is not None data = response.json() assert response.status_code == 200 @@ -61,41 +68,49 @@ def test_languages_by_repo_api_is_functional(): def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-lines-of-code-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-lines-of-code-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-lines-of-code-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-lines-of-code-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 # def test_license_coverage_by_group_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-coverage') + assert response is not None # data = response.json() # assert response.status_code == 200 # def test_license_coverage_by_repo_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-coverage') + assert response is not None # data = response.json() # assert response.status_code == 200 # def test_license_count_by_group_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-count') + assert response is not None # data = response.json() # assert response.status_code == 200 # def test_license_count_by_repo_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-count') + assert response is not None # data = response.json() # assert response.status_code == 200 diff --git a/tests/test_routes/test_routes_data/test_commit_routes_data.py b/tests/test_routes/test_routes_data/test_commit_routes_data.py index 637da608e6..7085dd5b4a 100644 --- a/tests/test_routes/test_routes_data/test_commit_routes_data.py +++ 
b/tests/test_routes/test_routes_data/test_commit_routes_data.py @@ -7,6 +7,7 @@ def test_annual_commit_count_ranked_by_new_repo_in_repo_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-new-repo-in-repo-group/') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -14,6 +15,7 @@ def test_annual_commit_count_ranked_by_new_repo_in_repo_group_api_data(): def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-commit-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -21,6 +23,7 @@ def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_repo_api_data() def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -29,6 +32,7 @@ def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_group_api_data( def test_annual_commit_count_ranked_by_repo_in_repo_group_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-commit-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -36,6 +40,7 @@ def test_annual_commit_count_ranked_by_repo_in_repo_group_by_repo_api_data(): def test_annual_commit_count_ranked_by_repo_in_repo_group_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -43,6 +48,7 @@ def test_annual_commit_count_ranked_by_repo_in_repo_group_by_group_api_data(): def test_top_committers_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/top-committers') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -50,6 +56,7 @@ def test_top_committers_by_repo_api_data(): def test_top_committers_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/top-committers') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -57,12 +64,14 @@ def test_top_committers_by_group_api_data(): def test_committer_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/committers') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 def test_committer_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/committers?period=year') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 diff --git a/tests/test_routes/test_routes_data/test_contributor_routes_data.py b/tests/test_routes/test_routes_data/test_contributor_routes_data.py index 
8cd3e80bbe..83be0a7cf3 100644 --- a/tests/test_routes/test_routes_data/test_contributor_routes_data.py +++ b/tests/test_routes/test_routes_data/test_contributor_routes_data.py @@ -6,6 +6,7 @@ def test_contributors_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/contributors') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -14,6 +15,7 @@ def test_contributors_by_group_api_data(): def test_contributors_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/contributors') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -21,6 +23,7 @@ def test_contributors_by_repo_api_data(): def test_contributors_new_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/contributors-new') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -29,6 +32,7 @@ def test_contributors_new_by_group_api_data(): def test_contributors_new_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/contributors-new') + assert response is not None print(response) data = response.json() assert response.status_code == 200 diff --git a/tests/test_routes/test_routes_data/test_issue_routes_data.py b/tests/test_routes/test_routes_data/test_issue_routes_data.py index 32140cd460..5af4ca5bc0 100644 --- a/tests/test_routes/test_routes_data/test_issue_routes_data.py +++ b/tests/test_routes/test_routes_data/test_issue_routes_data.py @@ -6,6 +6,7 @@ def test_issues_new_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-new') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -13,6 +14,7 @@ def test_issues_new_by_group_api_data(): def test_issues_new_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-new') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -20,6 +22,7 @@ def test_issues_new_by_repo_api_data(): def test_issues_active_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-active') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -27,6 +30,7 @@ def test_issues_active_by_group_api_data(): def test_issues_active_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-active') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -34,6 +38,7 @@ def test_issues_active_by_repo_api_data(): def test_issues_closed_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-closed') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -41,6 +46,7 @@ def test_issues_closed_by_group_api_data(): def test_issues_closed_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-closed') + assert response is not None data = response.json() assert 
response.status_code == 200 assert len(data) >= 1 @@ -48,18 +54,21 @@ def test_issues_closed_by_repo_api_data(): def test_issue_duration_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-duration') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 def test_issue_duration_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-duration') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 def test_issue_participants_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-participants') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -67,6 +76,7 @@ def test_issue_participants_by_group_api_data(): def test_issue_participants_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-participants') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -74,6 +84,7 @@ def test_issue_participants_by_repo_api_data(): def test_issue_throughput_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-throughput') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -81,6 +92,7 @@ def test_issue_throughput_by_group_api_data(): def test_issue_throughput_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-throughput') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -88,6 +100,7 @@ def test_issue_throughput_by_repo_api_data(): def test_issue_backlog_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-backlog') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -95,6 +108,7 @@ def test_issue_backlog_by_group_api_data(): def test_issue_backlog_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-backlog') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -102,6 +116,7 @@ def test_issue_backlog_by_repo_api_data(): def test_issues_first_time_opened_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-first-time-opened') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -109,6 +124,7 @@ def test_issues_first_time_opened_by_group_api_data(): def test_issues_first_time_opened_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-first-time-opened') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -116,6 +132,7 @@ def test_issues_first_time_opened_by_repo_api_data(): def test_issues_first_time_closed_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/20/issues-first-time-closed') + assert response is not None data = 
response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -123,6 +140,7 @@ def test_issues_first_time_closed_by_group_api_data(): def test_issues_first_time_closed_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/20/repos/25430/issues-first-time-closed') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -130,6 +148,7 @@ def test_issues_first_time_closed_by_repo_api_data(): def test_open_issues_count_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/open-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -137,6 +156,7 @@ def test_open_issues_count_by_group_api_data(): def test_open_issues_count_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/open-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -144,6 +164,7 @@ def test_open_issues_count_by_repo_api_data(): def test_closed_issues_count_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/closed-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -151,6 +172,7 @@ def test_closed_issues_count_by_group_api_data(): def test_closed_issues_count_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/closed-issues-count') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -158,6 +180,7 @@ def test_closed_issues_count_by_repo_api_data(): def test_issues_open_age_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-open-age/') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -165,6 +188,7 @@ def test_issues_open_age_by_group_api_data(): def test_issues_open_age_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-open-age/') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -172,6 +196,7 @@ def test_issues_open_age_by_repo_api_data(): def test_issues_closed_resolution_duration_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-closed-resolution-duration/') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -180,6 +205,7 @@ def test_issues_closed_resolution_duration_by_group_api_data(): def test_issues_closed_resolution_duration_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-closed-resolution-duration/') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -187,6 +213,7 @@ def test_issues_closed_resolution_duration_by_repo_api_data(): def test_issues_maintainer_response_duration_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-maintainer-response-duration/') + assert response is not None data = response.json() 
assert response.status_code == 200 assert len(data) >= 1 @@ -194,6 +221,7 @@ def test_issues_maintainer_response_duration_by_repo_api_data(): def test_issues_maintainer_response_duration_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-maintainer-response-duration/') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -202,36 +230,42 @@ def test_issues_maintainer_response_duration_by_group_api_data(): def test_average_issue_resolution_time_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/average-issue-resolution-time') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) > 0 def test_average_issue_resolution_time_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/average-issue-resolution-time') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) > 0 def test_issue_comments_mean_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-comments-mean') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 def test_issue_comments_mean_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-comments-mean') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 def test_issue_comments_mean_std_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-comments-mean-std') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 def test_issue_comments_mean_std_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-comments-mean-std') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 diff --git a/tests/test_routes/test_routes_data/test_pull_request_routes_data.py b/tests/test_routes/test_routes_data/test_pull_request_routes_data.py index 6350aa283a..7a40cbd734 100644 --- a/tests/test_routes/test_routes_data/test_pull_request_routes_data.py +++ b/tests/test_routes/test_routes_data/test_pull_request_routes_data.py @@ -6,6 +6,7 @@ def test_pull_requests_merge_contributor_new_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/pull-requests-merge-contributor-new') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -13,6 +14,7 @@ def test_pull_requests_merge_contributor_new_by_group_api_data(): def test_pull_requests_merge_contributor_new_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/pull-requests-merge-contributor-new') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -20,6 +22,7 @@ def test_pull_requests_merge_contributor_new_by_repo_api_data(): def test_pull_requests_closed_no_merge_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repos/25430/pull-requests-closed-no-merge') + assert response is not None data = 
response.json() assert response.status_code == 200 assert len(data) >= 1 diff --git a/tests/test_routes/test_routes_data/test_repo_meta_routes_data.py b/tests/test_routes/test_routes_data/test_repo_meta_routes_data.py index bdd21fc1e7..076b2b6843 100644 --- a/tests/test_routes/test_routes_data/test_repo_meta_routes_data.py +++ b/tests/test_routes/test_routes_data/test_repo_meta_routes_data.py @@ -6,6 +6,7 @@ def test_code_changes_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/code-changes') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -13,6 +14,7 @@ def test_code_changes_by_group_api_data(): def test_code_changes_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/code-changes') + assert response is not None data = response.json() assert response.status_code == 200 # assert len(data) >= 1 @@ -20,6 +22,7 @@ def test_code_changes_by_repo_api_data(): def test_code_changes_lines_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/code-changes-lines') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -28,6 +31,7 @@ def test_code_changes_lines_by_group_api_data(): def test_code_changes_lines_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/code-changes-lines') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -36,6 +40,7 @@ def test_code_changes_lines_by_repo_api_data(): def test_sub_projects_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/sub-projects') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -43,6 +48,7 @@ def test_sub_projects_by_group_api_data(): def test_sub_projects_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/sub-projects') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -50,6 +56,7 @@ def test_sub_projects_by_repo_api_data(): def test_cii_best_practices_badge_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/cii-best-practices-badge') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -64,18 +71,21 @@ def test_languages_by_repo_api_data(): # def test_license_declared_by_group_api_data(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-declared') + assert response is not None # data = response.json() # assert response.status_code == 200 # assert len(data) >= 1 # def test_license_declared_by_repo_api_data(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-declared') + assert response is not None # data = response.json() # assert response.status_code == 200 # assert len(data) >= 1 def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-lines-of-code-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = 
response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -83,6 +93,7 @@ def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_repo_api def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-lines-of-code-count-ranked-by-new-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -91,6 +102,7 @@ def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_group_ap def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_repo_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-lines-of-code-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -98,6 +110,7 @@ def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_repo_api_dat def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_group_api_data(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-lines-of-code-count-ranked-by-repo-in-repo-group') + assert response is not None data = response.json() assert response.status_code == 200 assert len(data) >= 1 @@ -105,24 +118,28 @@ def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_group_api_da # def test_license_coverage_by_group_api_data(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-coverage') + assert response is not None # data = response.json() # assert response.status_code == 200 # assert len(data) >= 1 # def test_license_coverage_by_repo_api_data(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-coverage') + assert response is not None # data = response.json() # assert response.status_code == 200 # assert len(data) >= 1 # def test_license_count_by_group_api_data(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-count') + assert response is not None # data = response.json() # assert response.status_code == 200 # assert len(data) >= 1 # def test_license_count_by_repo_api_data(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-count') + assert response is not None # data = response.json() # assert response.status_code == 200 # assert len(data) >= 1 diff --git a/tests/test_tasks/test_github_tasks/test_pull_requests.py b/tests/test_tasks/test_github_tasks/test_pull_requests.py index 97a35abd16..4a086950f4 100644 --- a/tests/test_tasks/test_github_tasks/test_pull_requests.py +++ b/tests/test_tasks/test_github_tasks/test_pull_requests.py @@ -9,8 +9,6 @@ from augur.application.db.models import Config from augur.tasks.util.AugurUUID import GithubUUID from augur.application.db.data_parse import extract_needed_contributor_data -from augur.application.db.engine import create_database_engine -from augur.application.db.util import execute_session_query logger = logging.getLogger(__name__) not_provided_cntrb_id = '00000000-0000-0000-0000-000000000000' @@ -21,8 +19,7 @@ def github_api_key_headers(): with DatabaseSession(logger) as session: - query = session.query(Config).filter(Config.section_name == "Keys", Config.setting_name == "github_api_key") - api_key = execute_session_query(query, 
'one').value + api_key = session.query(Config).filter(Config.section_name == "Keys", Config.setting_name == "github_api_key").one().value headers = {"Authorization": f'token {api_key}'} From a2ed153a53031ab24e20b32c2a1b9a0d900a3650 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Sat, 21 Jan 2023 17:24:08 -0600 Subject: [PATCH 018/134] Docs fix (#2136) * scaling fix for repo_move Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Change to rabbitmq broker Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * don't ignore result Signed-off-by: Isaac Milarsky * More logging in detect_github_repo_move Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * print Signed-off-by: Isaac Milarsky * re-add facade contributors to task queue Signed-off-by: Isaac Milarsky * better handling and logging files model Signed-off-by: Isaac Milarsky * take advantage of rabbitmq allowing us to use celery result Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * get rid of redundant definition Signed-off-by: Isaac Milarsky * docs update Signed-off-by: Isaac Milarsky * Change celery task scheduling to not scale proportionally to the amount of repos Signed-off-by: Isaac Milarsky * analysis sequence pooling for facade scaling Signed-off-by: Isaac Milarsky * need to fix issues with accessing redis Signed-off-by: Isaac Milarsky * don't create so many sessions Signed-off-by: Isaac Milarsky * Update Signed-off-by: Isaac Milarsky * doc update * fix facade date query error Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * updating MQ and REDIS Docs * Updates to docs. * documentation updatese * test * documentation updates * doc hell * trying * analyze_commits_in_parallel now shows progress in quarters in the logs. 
Also applied same scaling changes to facade contributor resolution in insert_facade_contributors Signed-off-by: Isaac Milarsky * sql format Signed-off-by: Isaac Milarsky * Typo Signed-off-by: Isaac Milarsky * skeleton for deps worker Signed-off-by: Isaac Milarsky * Better error handling Signed-off-by: Isaac Milarsky * add dependency util files from main-old Signed-off-by: Isaac Milarsky * Dependency worker Signed-off-by: Isaac Milarsky * add dependency model to repo_collect Signed-off-by: Isaac Milarsky * Syntax Signed-off-by: Isaac Milarsky * Facade tasks not getting ran for some reason Signed-off-by: Isaac Milarsky * add file Signed-off-by: Isaac Milarsky * python import Signed-off-by: Isaac Milarsky * make sure rabbitmq messages are cleared Signed-off-by: Isaac Milarsky * schedule less at once Signed-off-by: Isaac Milarsky * Grab correct vhost from config Signed-off-by: Isaac Milarsky * optimistic Signed-off-by: Isaac Milarsky * Change repo_collect to split up task load for smaller message Signed-off-by: Isaac Milarsky * version Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * Low load patch Signed-off-by: Isaac Milarsky * Shrink facade messages Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * increase message load Signed-off-by: Isaac Milarsky * Re-add the rabbitmq instructions and updated docker instructions Signed-off-by: Isaac Milarsky * Delete result.txt File added by mistake * Update setup.py added missing comma * Update facade_tasks.py Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky Co-authored-by: Sean Goggins Co-authored-by: Sean P. Goggins --- augur/tasks/git/facade_tasks.py | 4 +-- augur/tasks/github/releases/core.py | 26 +++++++++------ docs/source/docker/docker-compose.rst | 7 ---- docs/source/docker/getting-started.rst | 3 +- docs/source/getting-started/installation.rst | 34 +++++++++++++++++++ docs/source/quick-start.rst | 35 ++++++++++++++++---- 6 files changed, 80 insertions(+), 29 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index cb6ed03727..354aa9dbb7 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -196,6 +196,7 @@ def update_analysis_log(repos_id,status): #1/21/2023: SPG things list needs to be initialized based on error all_missing_commits = [] + #Get all missing commits into one large list to split into task pools all_missing_commits.extend(commits_with_repo_tuple) @@ -207,6 +208,7 @@ def update_analysis_log(repos_id,status): update_analysis_log(repo_id,'Beginning to trim commits') + session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") @@ -253,7 +255,6 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: for repo_id in repo_ids: session.logger.info(f"Generating sequence for repo {repo_id}") - query = session.query(Repo).filter(Repo.repo_id == repo_id) repo = execute_session_query(query, 'one') @@ -317,7 +318,6 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: query = session.query(Repo).filter(Repo.repo_id == repo_id) repo = execute_session_query(query,'one') - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") analyze_commit(session, repo_id, repo_loc, commitTuple) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 9f3adbc446..30da72d4e6 100644 --- a/augur/tasks/github/releases/core.py +++ 
b/augur/tasks/github/releases/core.py @@ -24,8 +24,8 @@ def get_release_inf(session, repo_id, release, tag_only): else: author = release["author"] - name = author.get("name") or "" - company = author.get("company") or "" + name = author.get("name") or "nobody" + company = author.get("company") or "nocompany" author = name + '_' + company @@ -46,15 +46,19 @@ def get_release_inf(session, repo_id, release, tag_only): } else: if 'tagger' in release['target']: - - tagger = release["target"]["tagger"] - - date = tagger.get("date") or "" - name = tagger.get("name") or "" - email = tagger.get("email") or "" - - author = name + "_" + email - + if 'name' in release['target']['tagger']: + name = release['target']['tagger']['name'] + else: + name = "nobody" + if 'email' in release['target']['tagger'] and release['target']['tagger']['email']: + email = '_' + release['target']['tagger']['email'] + else: + email = "noemail" + author = name + email + if 'date' in release['target']['tagger']: + date = release['target']['tagger']['date'] + else: + date = "" else: author = "" date = "" diff --git a/docs/source/docker/docker-compose.rst b/docs/source/docker/docker-compose.rst index a429e5a498..bd074026c1 100644 --- a/docs/source/docker/docker-compose.rst +++ b/docs/source/docker/docker-compose.rst @@ -52,13 +52,6 @@ To run Augur **with** the database container: docker-compose -f docker-compose.yml -f database-compose.yml up -If you want to use the ``test_data`` image with the data preloaded, change the ``image`` line of ``database-compose.yml`` to: - -.. code:: - - image: augurlabs/augur:test_data - -Or you can set it dynamically in the .env file. Stopping the containers ------------------------- diff --git a/docs/source/docker/getting-started.rst b/docs/source/docker/getting-started.rst index 69c3ef59bd..1ca0e607d4 100644 --- a/docs/source/docker/getting-started.rst +++ b/docs/source/docker/getting-started.rst @@ -13,7 +13,6 @@ Augur provides several Docker images designed to get you started with our softwa The frontend is very out of date and will likely not work. It is still available, but it is in the process of being replaced with an entirely new frontend so the old frontend is not being actively fixed. - ``augurlabs/augur:database``, an empty PostgreSQL database with the Augur schema installed -- ``augurlabs/augur:test_data``, a PostgreSQL database loaded with the data used in our testing environment If you're not familiar with Docker, their `starting guide `_ is a great resource. @@ -23,7 +22,7 @@ If you are less familiar with Docker, or experience issues you cannot resolve at Credentials ------------ -Before you get started with Docker, you'll need to set up a PostgreSQL instance either locally or using a remote host. Alternatively, you can also set up the database within a docker container either manually or through the script but this is not recommended. +Before you get started with Docker, you'll need to set up a PostgreSQL instance either locally or using a remote host. Alternatively, you can also set up the database within a docker container either manually or through docker-compose. .. 
note:: diff --git a/docs/source/getting-started/installation.rst b/docs/source/getting-started/installation.rst index e1eadd23ea..df0379f1f9 100644 --- a/docs/source/getting-started/installation.rst +++ b/docs/source/getting-started/installation.rst @@ -46,6 +46,40 @@ Caching System (Redis) * `Mac Installation `__ * `Windows Installation `__ +Message Broker (RabbitMQ) +---------------- +* `Linux Installation `__ +* `Mac Installation `__ +* `Windows Installation `__ + +After installation, you must also set up your rabbitmq instance by running the below commands: + +.. code-block:: bash + + sudo rabbitmqctl add_user augur password123 + + sudo rabbitmqctl add_vhost augur_vhost + + sudo rabbitmqctl set_user_tags augur augurTag + + sudo rabbitmqctl set_permissions -p augur_vhost augur ".*" ".*" ".*" + +.. note:: + it is important to have a static hostname when using rabbitmq as it uses hostname + to communicate with nodes. + +Then, start rabbitmq server with +.. code-block:: bash + + sudo systemctl start rabbitmq.service + + +If your setup of rabbitmq is successful your broker url should look like this: + +broker_url = 'amqp://augur:password123@localhost:5672/augur_vhost' + +During installation you will be prompted for this broker url. + Frontend --------- If you're interested in using our visualizations, you can optionally install the frontend dependencies: diff --git a/docs/source/quick-start.rst b/docs/source/quick-start.rst index c99c2523f7..56dab179d1 100644 --- a/docs/source/quick-start.rst +++ b/docs/source/quick-start.rst @@ -105,18 +105,39 @@ Then, once you've connected to your PostgreSQL instance: postgres=# CREATE USER augur WITH ENCRYPTED PASSWORD 'password'; postgres=# GRANT ALL PRIVILEGES ON DATABASE augur TO augur; -Git Configuration +Install RabbitMQ ~~~~~~~~~~~~~~~~~~~~~~~~ -- Configure Git: These instructions assume the potential of large repositories that occasionally perform significant refactoring within a small number of commits. Our experience is that nearly all organizations have at least one project that meets these criteria. +To set up rabbitmq for augur you must install it with the relevant package manager +for your distro. You can find more info on how to install rabbitmq `here `_. +After installation, you must also set up your rabbitmq instance by running the below commands: + +.. code-block:: bash + + sudo rabbitmqctl add_user augur password123 + + sudo rabbitmqctl add_vhost augur_vhost + + sudo rabbitmqctl set_user_tags augur augurTag + + sudo rabbitmqctl set_permissions -p augur_vhost augur ".*" ".*" ".*" + +.. note:: + it is important to have a static hostname when using rabbitmq as it uses hostname + to communicate with nodes. + +Then, start rabbitmq server with .. code-block:: bash - git config --global diff.renames true - git config --global diff.renameLimit 200000 - git config --global credential.helper cache - git config --global credential.helper 'cache --timeout=9999999999999' + sudo systemctl start rabbitmq.service + + +If your setup of rabbitmq is successful your broker url should look like this: + +broker_url = 'amqp://augur:password123@localhost:5672/augur_vhost' + +During installation you will be prompted for this broker url. -- For each platform, perform a command-line login to cache Git credentials for the LINUX user who operates Augur. This step is required in order to prevent the Facade Commit Counting Diesel from stalling on a command-line prompt when repositories move or disappear. 
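A quick way to confirm a broker URL like the one documented above before running Augur's installer is to open a connection to it directly. A minimal sketch, assuming the kombu library (installed alongside Celery) and the example credentials shown above; any other URL or timeout value is illustrative only:

.. code-block:: python

    from kombu import Connection

    # Example credentials from the RabbitMQ setup steps above.
    broker_url = 'amqp://augur:password123@localhost:5672/augur_vhost'

    try:
        # Connection is a context manager; the connection is released on exit.
        with Connection(broker_url, connect_timeout=5) as conn:
            conn.connect()  # raises if the host, vhost, or credentials are wrong
            print('Broker reachable:', conn.as_uri())
    except Exception as exc:
        print('Broker check failed:', exc)
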
Install Go ~~~~~~~~~~~~~~~~~~~~~~~~ From 91d91355c96b35f9be2ca26949efd6862bd7651f Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sat, 21 Jan 2023 17:59:17 -0600 Subject: [PATCH 019/134] Fixing modulo 0 error: (#2138) ``` Traceback (most recent call last): File "/home/sean/github/virtualenv/ag3/lib/python3.8/site-packages/celery/app/trace.py", line 451, in trace_task R = retval = fun(*args, **kwargs) File "/home/sean/github/virtualenv/ag3/lib/python3.8/site-packages/celery/app/trace.py", line 734, in __protected_call__ return self.run(*args, **kwargs) File "/home/sean/github/ag3/augur/tasks/git/facade_tasks.py", line 315, in analyze_commits_in_parallel if (count + 1) % int(len(queue) / 4) == 0: ZeroDivisionError: integer division or modulo by zero ``` --- augur/tasks/git/facade_tasks.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 354aa9dbb7..64283d3842 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -312,8 +312,10 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: for count, commitTuple in enumerate(queue): #Log progress when another quarter of the queue has been processed - if (count + 1) % int(len(queue) / 4) == 0: - logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") + #Checking for Modulo of Zero first. + if int(len(queue)/4)!=0: + if (count + 1) % int(len(queue) / 4) == 0: + logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") query = session.query(Repo).filter(Repo.repo_id == repo_id) repo = execute_session_query(query,'one') From 419f9c7c8b3fc7da64059d0d3fcc2fd5d81c446a Mon Sep 17 00:00:00 2001 From: Sean Goggins Date: Mon, 23 Jan 2023 10:34:10 -0600 Subject: [PATCH 020/134] Refixing this error, which got hosed in the merge yesterday: ``` Traceback (most recent call last): File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 451, in trace_task R = retval = fun(*args, **kwargs) File "/home/sean/github/virtualenv/k12/lib/python3.8/site-packages/celery/app/trace.py", line 734, in __protected_call__ return self.run(*args, **kwargs) File "/home/sean/github/rh-k12/augur/tasks/github/releases/tasks.py", line 15, in collect_releases releases_model(session, repo.repo_git, repo.repo_id) File "/home/sean/github/rh-k12/augur/tasks/github/releases/core.py", line 209, in releases_model insert_release(session, repo_id, data['owner'], release, True) File "/home/sean/github/rh-k12/augur/tasks/github/releases/core.py", line 89, in insert_release release_inf = get_release_inf(session, repo_id, release, tag_only) File "/home/sean/github/rh-k12/augur/tasks/github/releases/core.py", line 57, in get_release_inf author = name + email TypeError: unsupported operand type(s) for +: 'NoneType' and 'str' ``` --- augur/tasks/github/releases/core.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 30da72d4e6..96ac2b20e0 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -54,7 +54,15 @@ def get_release_inf(session, repo_id, release, tag_only): email = '_' + release['target']['tagger']['email'] else: email = "noemail" - author = name + email + + if name=None or email=None: + if name=None: + name="nobody" + if email=None: + email="noemail" + author = name + email + else: + author = name + 
email if 'date' in release['target']['tagger']: date = release['target']['tagger']['date'] else: From 1cf57cc4aa9dcebbeb152ea4b1a3b465cfa1c12e Mon Sep 17 00:00:00 2001 From: Sean Goggins Date: Mon, 23 Jan 2023 10:39:59 -0600 Subject: [PATCH 021/134] undoing a bad fix. I fixed this last night but didn't pull to k12. Its fixed on the dev branch that's in ag3. --- augur/tasks/github/releases/core.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 96ac2b20e0..7f994964d5 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -55,14 +55,6 @@ def get_release_inf(session, repo_id, release, tag_only): else: email = "noemail" - if name=None or email=None: - if name=None: - name="nobody" - if email=None: - email="noemail" - author = name + email - else: - author = name + email if 'date' in release['target']['tagger']: date = release['target']['tagger']['date'] else: From a7c51b7cd329b2e4820702f40d3963ab3174ea1a Mon Sep 17 00:00:00 2001 From: Sean Goggins Date: Mon, 23 Jan 2023 10:41:36 -0600 Subject: [PATCH 022/134] correct fix. --- augur/tasks/github/releases/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 7f994964d5..139644eb06 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -54,6 +54,7 @@ def get_release_inf(session, repo_id, release, tag_only): email = '_' + release['target']['tagger']['email'] else: email = "noemail" + author = name + email if 'date' in release['target']['tagger']: date = release['target']['tagger']['date'] From d653108e7017f5f5a92d19c32f19695e8d635770 Mon Sep 17 00:00:00 2001 From: Sean Goggins Date: Mon, 23 Jan 2023 10:47:05 -0600 Subject: [PATCH 023/134] Missed NoneType error potential. 
--- augur/tasks/github/releases/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index 139644eb06..df4550bd1f 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -61,7 +61,7 @@ def get_release_inf(session, repo_id, release, tag_only): else: date = "" else: - author = "" + author = "nobody" date = "" release_inf = { 'release_id': release['id'], From af1740aefb0a0fe1be8167fed7420b9c8701a179 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Mon, 23 Jan 2023 20:42:06 -0600 Subject: [PATCH 024/134] Release fix (#2141) * Release fix' Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * err handling Signed-off-by: Isaac Milarsky Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Signed-off-by: Isaac Milarsky Co-authored-by: Isaac Milarsky --- augur/tasks/github/events/tasks.py | 2 +- augur/tasks/github/releases/core.py | 20 +++++++------------- augur/tasks/github/releases/tasks.py | 6 +++++- 3 files changed, 13 insertions(+), 15 deletions(-) diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 5459788dc1..0b1242599e 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -1,6 +1,6 @@ import time import logging - +import traceback from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.data_parse import * diff --git a/augur/tasks/github/releases/core.py b/augur/tasks/github/releases/core.py index df4550bd1f..fa22dde271 100644 --- a/augur/tasks/github/releases/core.py +++ b/augur/tasks/github/releases/core.py @@ -46,20 +46,14 @@ def get_release_inf(session, repo_id, release, tag_only): } else: if 'tagger' in release['target']: - if 'name' in release['target']['tagger']: - name = release['target']['tagger']['name'] - else: - name = "nobody" - if 'email' in release['target']['tagger'] and release['target']['tagger']['email']: - email = '_' + release['target']['tagger']['email'] - else: - email = "noemail" - author = name + email - if 'date' in release['target']['tagger']: - date = release['target']['tagger']['date'] - else: - date = "" + tagger = release["target"]["tagger"] + + date = tagger.get("date") or "" + name = tagger.get("name") or "nobody" + email = tagger.get("email") or "noemail" + + author = name + "_" + email else: author = "nobody" date = "" diff --git a/augur/tasks/github/releases/tasks.py b/augur/tasks/github/releases/tasks.py index 6a9ed9a71f..a71f3da480 100644 --- a/augur/tasks/github/releases/tasks.py +++ b/augur/tasks/github/releases/tasks.py @@ -2,6 +2,7 @@ from augur.tasks.github.releases.core import * from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.util import execute_session_query +import traceback @celery.task def collect_releases(): @@ -12,4 +13,7 @@ def collect_releases(): repos = execute_session_query(query, 'all') for repo in repos: - releases_model(session, repo.repo_git, repo.repo_id) \ No newline at end of file + try: + releases_model(session, repo.repo_git, repo.repo_id) + except Exception as e: + logger.error(f"Could not collect releases for {repo.repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") From 6058080c145ea7a552076bb3912548060292e152 Mon Sep 17 00:00:00 2001 From: "Sean P. 
Goggins" Date: Tue, 24 Jan 2023 11:21:51 -0600 Subject: [PATCH 025/134] committing materialized view to a fresh branch off of dev. (#2143) * committing materialized view to a fresh branch off of dev. * Change recommended by Andrew. * Fixing a glitch introduced by removing pointless function. Need to then call the actual function. --- .../4_explorer_materialized_view_update.py | 652 ++++++++++++++++++ 1 file changed, 652 insertions(+) create mode 100644 augur/application/schema/alembic/versions/4_explorer_materialized_view_update.py diff --git a/augur/application/schema/alembic/versions/4_explorer_materialized_view_update.py b/augur/application/schema/alembic/versions/4_explorer_materialized_view_update.py new file mode 100644 index 0000000000..bbe8a1d6a5 --- /dev/null +++ b/augur/application/schema/alembic/versions/4_explorer_materialized_view_update.py @@ -0,0 +1,652 @@ +"""Augur New Changes + +Revision ID: 4 +Revises: 3 +Create Date: 2023-01-24 09:17:31.706564 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql +from sqlalchemy.sql import text +from augur.tasks.util.AugurUUID import AugurUUID, GithubUUID, UnresolvableUUID + + +# revision identifiers, used by Alembic. +revision = '4' +down_revision = '3' +branch_labels = None +depends_on = None + + +def upgrade(): + + add_materialized_views_15() + +def downgrade(): + + upgrade=False + + add_materialized_views_15(upgrade) + +def add_materialized_views_15(upgrade=True): + + if upgrade: + conn = op.get_bind() + conn.execute(text(""" + drop materialized view if exists augur_data.explorer_commits_and_committers_daily_count; + drop materialized view if exists augur_data.api_get_all_repos_commits; + drop materialized view if exists augur_data.api_get_all_repos_issues; + drop materialized view if exists augur_data.augur_new_contributors; + drop materialized view if exists augur_data.explorer_contributor_actions; + drop materialized view if exists augur_data.explorer_entry_list; + drop materialized view if exists augur_data.explorer_libyear_all; + drop materialized view if exists augur_data.explorer_libyear_detail; + drop materialized view if exists augur_data.explorer_new_contributors; + drop materialized view if exists augur_data.api_get_all_repo_prs; + drop materialized view if exists augur_data.explorer_libyear_summary;""")) + + conn.execute(text(""" + create materialized view augur_data.api_get_all_repo_prs as + SELECT pull_requests.repo_id, + count(*) AS pull_requests_all_time + FROM augur_data.pull_requests + GROUP BY pull_requests.repo_id;""")) + + conn.execute(text(""" + create materialized view augur_data.explorer_entry_list as + + SELECT DISTINCT r.repo_git, + r.repo_id, + r.repo_name, + rg.rg_name + FROM (augur_data.repo r + JOIN augur_data.repo_groups rg ON ((rg.repo_group_id = r.repo_group_id))) + ORDER BY rg.rg_name;""")) + + conn.execute(text(""" + create materialized view augur_data.explorer_commits_and_committers_daily_count as + SELECT repo.repo_id, + repo.repo_name, + commits.cmt_committer_date, + count(commits.cmt_id) AS num_of_commits, + count(DISTINCT commits.cmt_committer_raw_email) AS num_of_unique_committers + FROM (augur_data.commits + LEFT JOIN augur_data.repo ON ((repo.repo_id = commits.repo_id))) + GROUP BY repo.repo_id, repo.repo_name, commits.cmt_committer_date + ORDER BY repo.repo_id, commits.cmt_committer_date;""")) + + conn.execute(text(""" + + create materialized view augur_data.api_get_all_repos_commits as + SELECT commits.repo_id, + count(DISTINCT 
commits.cmt_commit_hash) AS commits_all_time + FROM augur_data.commits + GROUP BY commits.repo_id;""")) + + + conn.execute(text(""" + + create materialized view augur_data.api_get_all_repos_issues as + SELECT issues.repo_id, + count(*) AS issues_all_time + FROM augur_data.issues + WHERE (issues.pull_request IS NULL) + GROUP BY issues.repo_id; """)) + + conn.execute(text(""" + + create materialized view augur_data.explorer_libyear_all as + SELECT a.repo_id, + a.repo_name, + avg(b.libyear) AS avg_libyear, + date_part('month'::text, (a.data_collection_date)::date) AS month, + date_part('year'::text, (a.data_collection_date)::date) AS year + FROM augur_data.repo a, + augur_data.repo_deps_libyear b + GROUP BY a.repo_id, a.repo_name, (date_part('month'::text, (a.data_collection_date)::date)), (date_part('year'::text, (a.data_collection_date)::date)) + ORDER BY (date_part('year'::text, (a.data_collection_date)::date)) DESC, (date_part('month'::text, (a.data_collection_date)::date)) DESC, (avg(b.libyear)) DESC; """)) + + conn.execute(text(""" + + create materialized view augur_data.explorer_libyear_summary as + SELECT a.repo_id, + a.repo_name, + avg(b.libyear) AS avg_libyear, + date_part('month'::text, (a.data_collection_date)::date) AS month, + date_part('year'::text, (a.data_collection_date)::date) AS year + FROM augur_data.repo a, + augur_data.repo_deps_libyear b + GROUP BY a.repo_id, a.repo_name, (date_part('month'::text, (a.data_collection_date)::date)), (date_part('year'::text, (a.data_collection_date)::date)) + ORDER BY (date_part('year'::text, (a.data_collection_date)::date)) DESC, (date_part('month'::text, (a.data_collection_date)::date)) DESC, (avg(b.libyear)) DESC;""")) + + conn.execute(text(""" + + create materialized view augur_data.explorer_libyear_detail as + SELECT a.repo_id, + a.repo_name, + b.name, + b.requirement, + b.current_verion, + b.latest_version, + b.current_release_date, + b.libyear, + max(b.data_collection_date) AS max + FROM augur_data.repo a, + augur_data.repo_deps_libyear b + GROUP BY a.repo_id, a.repo_name, b.name, b.requirement, b.current_verion, b.latest_version, b.current_release_date, b.libyear + ORDER BY a.repo_id, b.requirement; """)) + + conn.execute(text(""" + + create materialized view augur_data.explorer_new_contributors as + SELECT x.cntrb_id, + x.created_at, + x.month, + x.year, + x.repo_id, + x.repo_name, + x.full_name, + x.login, + x.rank + FROM ( SELECT b.cntrb_id, + b.created_at, + b.month, + b.year, + b.repo_id, + b.repo_name, + b.full_name, + b.login, + b.action, + b.rank + FROM ( SELECT a.id AS cntrb_id, + a.created_at, + date_part('month'::text, (a.created_at)::date) AS month, + date_part('year'::text, (a.created_at)::date) AS year, + a.repo_id, + repo.repo_name, + a.full_name, + a.login, + a.action, + rank() OVER (PARTITION BY a.id ORDER BY a.created_at) AS rank + FROM ( SELECT canonical_full_names.canonical_id AS id, + issues.created_at, + issues.repo_id, + 'issue_opened'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.issues + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = issues.reporter_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER 
BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE (issues.pull_request IS NULL) + GROUP BY canonical_full_names.canonical_id, issues.repo_id, issues.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT canonical_full_names.canonical_id AS id, + to_timestamp((commits.cmt_author_date)::text, 'YYYY-MM-DD'::text) AS created_at, + commits.repo_id, + 'commit'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.commits + LEFT JOIN augur_data.contributors ON (((contributors.cntrb_canonical)::text = (commits.cmt_author_email)::text))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + GROUP BY commits.repo_id, canonical_full_names.canonical_email, canonical_full_names.canonical_id, commits.cmt_author_date, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT message.cntrb_id AS id, + commit_comment_ref.created_at, + commits.repo_id, + 'commit_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.commit_comment_ref, + augur_data.commits, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((commits.cmt_id = commit_comment_ref.cmt_id) AND (commit_comment_ref.msg_id = message.msg_id)) + GROUP BY message.cntrb_id, commits.repo_id, commit_comment_ref.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT issue_events.cntrb_id AS id, + issue_events.created_at, + issues.repo_id, + 'issue_closed'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.issues, + ((augur_data.issue_events + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = issue_events.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((issues.issue_id = issue_events.issue_id) AND (issues.pull_request IS NULL) AND 
(issue_events.cntrb_id IS NOT NULL) AND ((issue_events.action)::text = 'closed'::text)) + GROUP BY issue_events.cntrb_id, issues.repo_id, issue_events.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT pull_requests.pr_augur_contributor_id AS id, + pull_requests.pr_created_at AS created_at, + pull_requests.repo_id, + 'open_pull_request'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.pull_requests + LEFT JOIN augur_data.contributors ON ((pull_requests.pr_augur_contributor_id = contributors.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + GROUP BY pull_requests.pr_augur_contributor_id, pull_requests.repo_id, pull_requests.pr_created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT message.cntrb_id AS id, + message.msg_timestamp AS created_at, + pull_requests.repo_id, + 'pull_request_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.pull_requests, + augur_data.pull_request_message_ref, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((pull_request_message_ref.pull_request_id = pull_requests.pull_request_id) AND (pull_request_message_ref.msg_id = message.msg_id)) + GROUP BY message.cntrb_id, pull_requests.repo_id, message.msg_timestamp, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT issues.reporter_id AS id, + message.msg_timestamp AS created_at, + issues.repo_id, + 'issue_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.issues, + augur_data.issue_message_ref, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((issue_message_ref.msg_id = message.msg_id) AND (issues.issue_id = issue_message_ref.issue_id) AND (issues.pull_request_id = NULL::bigint)) + 
GROUP BY issues.reporter_id, issues.repo_id, message.msg_timestamp, contributors.cntrb_full_name, contributors.cntrb_login) a, + augur_data.repo + WHERE ((a.id IS NOT NULL) AND (a.repo_id = repo.repo_id)) + GROUP BY a.id, a.repo_id, a.action, a.created_at, repo.repo_name, a.full_name, a.login + ORDER BY a.id) b + WHERE (b.rank = ANY (ARRAY[(1)::bigint, (2)::bigint, (3)::bigint, (4)::bigint, (5)::bigint, (6)::bigint, (7)::bigint]))) x;""")) + + conn.execute(text(""" + + create materialized view augur_data.augur_new_contributors as + SELECT x.cntrb_id, + x.created_at, + x.month, + x.year, + x.repo_id, + x.repo_name, + x.full_name, + x.login, + x.rank + FROM ( SELECT b.cntrb_id, + b.created_at, + b.month, + b.year, + b.repo_id, + b.repo_name, + b.full_name, + b.login, + b.action, + b.rank + FROM ( SELECT a.id AS cntrb_id, + a.created_at, + date_part('month'::text, (a.created_at)::date) AS month, + date_part('year'::text, (a.created_at)::date) AS year, + a.repo_id, + repo.repo_name, + a.full_name, + a.login, + a.action, + rank() OVER (PARTITION BY a.id ORDER BY a.created_at) AS rank + FROM ( SELECT canonical_full_names.canonical_id AS id, + issues.created_at, + issues.repo_id, + 'issue_opened'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.issues + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = issues.reporter_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE (issues.pull_request IS NULL) + GROUP BY canonical_full_names.canonical_id, issues.repo_id, issues.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT canonical_full_names.canonical_id AS id, + to_timestamp((commits.cmt_author_date)::text, 'YYYY-MM-DD'::text) AS created_at, + commits.repo_id, + 'commit'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.commits + LEFT JOIN augur_data.contributors ON (((contributors.cntrb_email)::text = (commits.cmt_author_email)::text))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + GROUP BY commits.repo_id, canonical_full_names.canonical_email, canonical_full_names.canonical_id, commits.cmt_author_date, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT message.cntrb_id AS id, + commit_comment_ref.created_at, + commits.repo_id, + 'commit_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.commit_comment_ref, + augur_data.commits, + ((augur_data.message + LEFT JOIN 
augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((commits.cmt_id = commit_comment_ref.cmt_id) AND (commit_comment_ref.msg_id = message.msg_id)) + GROUP BY message.cntrb_id, commits.repo_id, commit_comment_ref.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT issue_events.cntrb_id AS id, + issue_events.created_at, + issues.repo_id, + 'issue_closed'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.issues, + ((augur_data.issue_events + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = issue_events.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((issues.issue_id = issue_events.issue_id) AND (issues.pull_request IS NULL) AND (issue_events.cntrb_id IS NOT NULL) AND ((issue_events.action)::text = 'closed'::text)) + GROUP BY issue_events.cntrb_id, issues.repo_id, issue_events.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT pull_requests.pr_augur_contributor_id AS id, + pull_requests.pr_created_at AS created_at, + pull_requests.repo_id, + 'open_pull_request'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.pull_requests + LEFT JOIN augur_data.contributors ON ((pull_requests.pr_augur_contributor_id = contributors.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + GROUP BY pull_requests.pr_augur_contributor_id, pull_requests.repo_id, pull_requests.pr_created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT message.cntrb_id AS id, + message.msg_timestamp AS created_at, + pull_requests.repo_id, + 'pull_request_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.pull_requests, + augur_data.pull_request_message_ref, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON 
(contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((pull_request_message_ref.pull_request_id = pull_requests.pull_request_id) AND (pull_request_message_ref.msg_id = message.msg_id)) + GROUP BY message.cntrb_id, pull_requests.repo_id, message.msg_timestamp, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT issues.reporter_id AS id, + message.msg_timestamp AS created_at, + issues.repo_id, + 'issue_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.issues, + augur_data.issue_message_ref, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((issue_message_ref.msg_id = message.msg_id) AND (issues.issue_id = issue_message_ref.issue_id) AND (issues.pull_request_id = NULL::bigint)) + GROUP BY issues.reporter_id, issues.repo_id, message.msg_timestamp, contributors.cntrb_full_name, contributors.cntrb_login) a, + augur_data.repo + WHERE ((a.id IS NOT NULL) AND (a.repo_id = repo.repo_id)) + GROUP BY a.id, a.repo_id, a.action, a.created_at, repo.repo_name, a.full_name, a.login + ORDER BY a.id) b + WHERE (b.rank = ANY (ARRAY[(1)::bigint, (2)::bigint, (3)::bigint, (4)::bigint, (5)::bigint, (6)::bigint, (7)::bigint]))) x; """)) + + conn.execute(text(""" + create materialized view augur_data.explorer_contributor_actions as + SELECT x.cntrb_id, + x.created_at, + x.repo_id, + x.login, + x.action, + x.rank + FROM ( SELECT b.cntrb_id, + b.created_at, + b.month, + b.year, + b.repo_id, + b.repo_name, + b.full_name, + b.login, + b.action, + b.rank + FROM ( SELECT a.id AS cntrb_id, + a.created_at, + date_part('month'::text, (a.created_at)::date) AS month, + date_part('year'::text, (a.created_at)::date) AS year, + a.repo_id, + repo.repo_name, + a.full_name, + a.login, + a.action, + rank() OVER (PARTITION BY a.id, a.repo_id ORDER BY a.created_at) AS rank + FROM ( SELECT canonical_full_names.canonical_id AS id, + issues.created_at, + issues.repo_id, + 'issue_opened'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.issues + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = issues.reporter_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = 
(contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE (issues.pull_request IS NULL) + GROUP BY canonical_full_names.canonical_id, issues.repo_id, issues.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT canonical_full_names.canonical_id AS id, + to_timestamp((commits.cmt_author_date)::text, 'YYYY-MM-DD'::text) AS created_at, + commits.repo_id, + 'commit'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.commits + LEFT JOIN augur_data.contributors ON (((contributors.cntrb_canonical)::text = (commits.cmt_author_email)::text))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_canonical)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + GROUP BY commits.repo_id, canonical_full_names.canonical_email, canonical_full_names.canonical_id, commits.cmt_author_date, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT message.cntrb_id AS id, + commit_comment_ref.created_at, + commits.repo_id, + 'commit_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.commit_comment_ref, + augur_data.commits, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((commits.cmt_id = commit_comment_ref.cmt_id) AND (commit_comment_ref.msg_id = message.msg_id)) + GROUP BY message.cntrb_id, commits.repo_id, commit_comment_ref.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT issue_events.cntrb_id AS id, + issue_events.created_at, + issues.repo_id, + 'issue_closed'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.issues, + ((augur_data.issue_events + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = issue_events.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((issues.issue_id = issue_events.issue_id) AND 
(issues.pull_request IS NULL) AND (issue_events.cntrb_id IS NOT NULL) AND ((issue_events.action)::text = 'closed'::text)) + GROUP BY issue_events.cntrb_id, issues.repo_id, issue_events.created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT pull_requests.pr_augur_contributor_id AS id, + pull_requests.pr_created_at AS created_at, + pull_requests.repo_id, + 'open_pull_request'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM ((augur_data.pull_requests + LEFT JOIN augur_data.contributors ON ((pull_requests.pr_augur_contributor_id = contributors.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + GROUP BY pull_requests.pr_augur_contributor_id, pull_requests.repo_id, pull_requests.pr_created_at, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT message.cntrb_id AS id, + message.msg_timestamp AS created_at, + pull_requests.repo_id, + 'pull_request_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.pull_requests, + augur_data.pull_request_message_ref, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((pull_request_message_ref.pull_request_id = pull_requests.pull_request_id) AND (pull_request_message_ref.msg_id = message.msg_id)) + GROUP BY message.cntrb_id, pull_requests.repo_id, message.msg_timestamp, contributors.cntrb_full_name, contributors.cntrb_login + UNION ALL + SELECT issues.reporter_id AS id, + message.msg_timestamp AS created_at, + issues.repo_id, + 'issue_comment'::text AS action, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM augur_data.issues, + augur_data.issue_message_ref, + ((augur_data.message + LEFT JOIN augur_data.contributors ON ((contributors.cntrb_id = message.cntrb_id))) + LEFT JOIN ( SELECT DISTINCT ON (contributors_1.cntrb_canonical) contributors_1.cntrb_full_name, + contributors_1.cntrb_canonical AS canonical_email, + contributors_1.data_collection_date, + contributors_1.cntrb_id AS canonical_id + FROM augur_data.contributors contributors_1 + WHERE ((contributors_1.cntrb_canonical)::text = (contributors_1.cntrb_email)::text) + ORDER BY contributors_1.cntrb_canonical) canonical_full_names ON (((canonical_full_names.canonical_email)::text = (contributors.cntrb_canonical)::text))) + WHERE ((issue_message_ref.msg_id = message.msg_id) AND (issues.issue_id = issue_message_ref.issue_id) AND 
(issues.pull_request_id = NULL::bigint)) + GROUP BY issues.reporter_id, issues.repo_id, message.msg_timestamp, contributors.cntrb_full_name, contributors.cntrb_login) a, + augur_data.repo + WHERE ((a.id IS NOT NULL) AND (a.repo_id = repo.repo_id)) + GROUP BY a.id, a.repo_id, a.action, a.created_at, repo.repo_name, a.full_name, a.login + ORDER BY a.created_at DESC) b) x + ORDER BY x.created_at DESC; """)) + else: + + conn = op.get_bind() + conn.execute(text("""drop materialized view if exists augur_data.explorer_commits_and_committers_daily_count;""")) + conn.execute(text("""drop materialized view if exists augur_data.api_get_all_repos_commits;""")) + conn.execute(text("""drop materialized view if exists augur_data.api_get_all_repos_issues;""")) + conn.execute(text("""drop materialized view if exists augur_data.augur_new_contributors;""")) + conn.execute(text("""drop materialized view if exists augur_data.explorer_contributor_actions;""")) + conn.execute(text("""drop materialized view if exists augur_data.explorer_entry_list;""")) + conn.execute(text("""drop materialized view if exists augur_data.explorer_libyear_all;""")) + conn.execute(text("""drop materialized view if exists augur_data.explorer_libyear_detail;""")) + conn.execute(text("""drop materialized view if exists augur_data.explorer_libyear_summary;""")) + conn.execute(text("""drop materialized view if exists augur_data.explorer_new_contributors;""")) + conn.execute(text("""drop materialized view if exists augur_data.api_get_all_repo_prs;""")) From 714bf0703b43cad364180f5bbac621516cdad1d8 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Wed, 25 Jan 2023 10:55:12 -0600 Subject: [PATCH 026/134] Merge rest of Andrew & Isaac Changes (#2146) * Add user group functionality to repo load controller Signed-off-by: Andrew Brain * Add user group table Signed-off-by: Andrew Brain * Changes for user groups Signed-off-by: Andrew Brain * Start working on converting old dbs to new version Signed-off-by: Andrew Brain * Add script to upgrade database Signed-off-by: Andrew Brain * Fix up downgrade and upgrade script Signed-off-by: Andrew Brain * Remove prints from script Signed-off-by: Andrew Brain * Fixes to repo insertion methods Signed-off-by: Andrew Brain * First run of adding repos to groups Signed-off-by: Andrew Brain * Match the group id data types Signed-off-by: Andrew Brain * Major improvements to user group functionality Signed-off-by: Andrew Brain * Pass more repo load controller tests Signed-off-by: Andrew Brain * Move around tests for readability Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests Signed-off-by: Andrew Brain * Add more tests to repo load controller Signed-off-by: Andrew Brain * Add more tests to repo load controller Signed-off-by: Andrew Brain * Fix deleting user errors Signed-off-by: Andrew Brain * Small fixes to user endpoints Signed-off-by: Andrew Brain * scaling fix for repo_move Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Add more tests for coverage Signed-off-by: Andrew Brain * Add more endpoints to get the group and repo data for the frontend Signed-off-by: Andrew Brain * Add documentation and update User endpoints: - Make ancillary arguments optional for group_repos - Add documentation clarifying new repo group endpoints * 
Change to rabbitmq broker Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Add docs and a few fixes Signed-off-by: Andrew Brain * don't ignore result Signed-off-by: Isaac Milarsky * More logging in detect_github_repo_move Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * More oauth work * print Signed-off-by: Isaac Milarsky * Add auth to endpoints Signed-off-by: Andrew Brain * re-add facade contributors to task queue Signed-off-by: Isaac Milarsky * better handling and logging files model Signed-off-by: Isaac Milarsky * Remove unneeded file Signed-off-by: Andrew Brain * take advantage of rabbitmq allowing us to use celery result Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * Initial integration and testing * Convert augur view login logic to the user orm model Signed-off-by: Andrew Brain * Outline user methods on the user orm class Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Update routes to use orm functions Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Fix some login bugs Signed-off-by: Andrew Brain * Add function to paginate all repos, user repos and group repos Signed-off-by: Andrew Brain * Add functions to paginate user, group and all repos Signed-off-by: Andrew Brain * Fix syntax error Signed-off-by: Andrew Brain * Fixes Signed-off-by: Andrew Brain * Fix various bugs Signed-off-by: Andrew Brain * Remove prints Signed-off-by: Andrew Brain * Make json endpoints only work when logged in Signed-off-by: Andrew Brain * Return error if user is not logged in when using the api Signed-off-by: Andrew Brain * Add more function to orm Signed-off-by: Andrew Brain * Integration work * Further integration testing and stability improvements Signed-off-by: Ulincsys <28362836a@gmail.com> * Improve error logging and fix error when loading a user or group repos Signed-off-by: Andrew Brain * Fix small errors in user login and improve logging Signed-off-by: Andrew Brain * Fix user deletion and improve logging when there are no valid github api keys Signed-off-by: Andrew Brain * Fix get repo by id in repo model Signed-off-by: Andrew Brain * Stability improvements Signed-off-by: Ulincsys <28362836a@gmail.com> * get rid of redundant definition Signed-off-by: Isaac Milarsky * docs update Signed-off-by: Isaac Milarsky * further improvements Signed-off-by: Ulincsys <28362836a@gmail.com> * Track templates directory Signed-off-by: Ulincsys <28362836a@gmail.com> * Make default group allowed, and return user group exists if it does Signed-off-by: Andrew Brain * Change celery task scheduling to not scale proportionally to the amount of repos Signed-off-by: Isaac Milarsky * Fix errors in the api Signed-off-by: Andrew Brain * User function improvements Signed-off-by: Andrew Brain * Remove print Signed-off-by: Andrew Brain * analysis sequence pooling for facade scaling Signed-off-by: Isaac Milarsky * need to fix issues with accessing redis Signed-off-by: Isaac Milarsky * don't create so many sessions Signed-off-by: Isaac Milarsky * Add database changes and fixes to the api Signed-off-by: Andrew Brain * Added refresh endpoint Signed-off-by: Ulincsys * Update Signed-off-by: Isaac Milarsky * Add code for refresh tokens Signed-off-by: Andrew Brain * Update auth requirements Signed-off-by: Ulincsys * Implement group favorite functionality Signed-off-by: Andrew Brain * doc update * Make session tokens expire Signed-off-by: Andrew Brain * fix facade date query error Signed-off-by: Isaac Milarsky *
Fix error in unauthorized handler Signed-off-by: Andrew Brain * remove excessive facade logging Signed-off-by: Isaac Milarsky * remove excessive facade logging Signed-off-by: Isaac Milarsky * updating MQ and REDIS Docs * Fix refresh endpoint response Signed-off-by: Ulincsys * Fix conflicts Signed-off-by: Andrew Brain * Add reverted changes Signed-off-by: Ulincsys * Updates to docs. * Fix schema revisions Signed-off-by: Andrew Brain * documentation updates * test * Add pointer class to star, implement API toggle call Signed-off-by: Ulincsys * Fix error where frontend user received json Signed-off-by: Andrew Brain * documentation updates * doc hell * trying * Add fixes for oauth Signed-off-by: Andrew Brain * analyze_commits_in_parallel now shows progress in quarters in the logs. Also applied same scaling changes to facade contributor resolution in insert_facade_contributors Signed-off-by: Isaac Milarsky * sql format Signed-off-by: Isaac Milarsky * Typo Signed-off-by: Isaac Milarsky * skeleton for deps worker Signed-off-by: Isaac Milarsky * Better error handling Signed-off-by: Isaac Milarsky * add dependency util files from main-old Signed-off-by: Isaac Milarsky * Dependency worker Signed-off-by: Isaac Milarsky * add dependency model to repo_collect Signed-off-by: Isaac Milarsky * Syntax Signed-off-by: Isaac Milarsky * Facade tasks not getting run for some reason Signed-off-by: Isaac Milarsky * add file Signed-off-by: Isaac Milarsky * Remove bind from gunicorn command because it is specified in the gunicorn_conf Signed-off-by: Andrew Brain * Remove config from database session object Signed-off-by: Andrew Brain * Remove unneeded dependencies from db connection code Signed-off-by: Andrew Brain * Make it possible to use the DatabaseSession in the orm models Signed-off-by: Andrew Brain * Clean up tests, refactor some poor design choices Signed-off-by: Andrew Brain * python import Signed-off-by: Isaac Milarsky * make sure rabbitmq messages are cleared Signed-off-by: Isaac Milarsky * schedule less at once Signed-off-by: Isaac Milarsky * Grab correct vhost from config Signed-off-by: Isaac Milarsky * optimistic Signed-off-by: Isaac Milarsky * Remove config from server object since it is unneeded Signed-off-by: Andrew Brain * Fix 2 database connections staying open after starting augur Signed-off-by: Andrew Brain * Fix small errors Signed-off-by: Andrew Brain * Clean up GithubTaskSession objects when only a DatabaseSession is needed Signed-off-by: Andrew Brain * Pass celery's global db engine to all sessions within tasks Signed-off-by: Andrew Brain * Change repo_collect to split up task load for smaller message Signed-off-by: Isaac Milarsky * version Signed-off-by: Isaac Milarsky * debug Signed-off-by: Isaac Milarsky * Low load patch Signed-off-by: Isaac Milarsky * Shrink facade messages Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * syntax Signed-off-by: Isaac Milarsky * increase message load Signed-off-by: Isaac Milarsky * Re-add the rabbitmq instructions and updated docker instructions Signed-off-by: Isaac Milarsky * Make facade not block collection Signed-off-by: Isaac Milarsky * typo Signed-off-by: Isaac Milarsky * prevent division by zero Signed-off-by: Isaac Milarsky * Fix errors caused by merge Signed-off-by: Andrew Brain * Fix various errors Signed-off-by: Andrew Brain * releases err handle Signed-off-by: Isaac Milarsky * 50 not 5 Signed-off-by: Isaac Milarsky * merge Signed-off-by: Isaac Milarsky * weird github response handle Signed-off-by: Isaac Milarsky * cleanup
logs Signed-off-by: Isaac Milarsky * more conflicts Signed-off-by: Isaac Milarsky * more merges Signed-off-by: Isaac Milarsky * more merge conflicts Signed-off-by: Isaac Milarsky * Make operational error more explicit Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain Signed-off-by: Isaac Milarsky Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Signed-off-by: Ulincsys <28362836a@gmail.com> Signed-off-by: Isaac Milarsky Signed-off-by: Ulincsys Co-authored-by: Isaac Milarsky Co-authored-by: Ulincsys <28362836a@gmail.com> Co-authored-by: Ulincsys Co-authored-by: Isaac Milarsky Co-authored-by: Sean Goggins Co-authored-by: Sean P. Goggins --- augur/api/gunicorn_conf.py | 24 +- augur/api/routes/config.py | 3 +- augur/api/routes/user.py | 12 +- augur/api/routes/util.py | 3 +- augur/api/server.py | 14 +- augur/api/view/api.py | 7 +- augur/api/view/routes.py | 55 +- augur/application/cli/__init__.py | 3 +- augur/application/cli/backend.py | 71 +- augur/application/cli/config.py | 14 +- augur/application/config.py | 4 +- augur/application/db/data_parse.py | 4 - augur/application/db/engine.py | 13 +- augur/application/db/models/__init__.py | 2 + augur/application/db/models/augur_data.py | 178 +++- .../application/db/models/augur_operations.py | 514 ++++++++---- augur/application/db/session.py | 18 +- augur/application/db/util.py | 4 +- augur/application/util.py | 6 +- .../data_analysis/clustering_worker/tasks.py | 21 +- .../contributor_breadth_worker.py | 4 +- .../data_analysis/discourse_analysis/tasks.py | 6 +- .../data_analysis/insight_worker/tasks.py | 29 +- .../data_analysis/message_insights/tasks.py | 13 +- .../pull_request_analysis_worker/tasks.py | 11 +- augur/tasks/db/refresh_materialized_views.py | 4 +- augur/tasks/git/dependency_tasks/core.py | 5 +- .../dependency_util/python_deps.py | 4 +- augur/tasks/git/facade_tasks.py | 88 +- .../facade_worker/facade01config.py | 13 +- augur/tasks/github/detect_move/tasks.py | 4 +- augur/tasks/github/events/tasks.py | 29 +- augur/tasks/github/facade_github/tasks.py | 4 +- augur/tasks/github/issues/tasks.py | 33 +- augur/tasks/github/messages/tasks.py | 30 +- .../pull_requests/commits_model/core.py | 4 +- .../pull_requests/commits_model/tasks.py | 23 +- augur/tasks/github/pull_requests/core.py | 8 +- .../github/pull_requests/files_model/core.py | 4 +- .../github/pull_requests/files_model/tasks.py | 23 +- augur/tasks/github/pull_requests/tasks.py | 33 +- augur/tasks/github/repo_info/tasks.py | 3 +- .../github/util/github_api_key_handler.py | 7 +- augur/tasks/github/util/github_paginator.py | 20 +- .../tasks/github/util/github_task_session.py | 2 - augur/tasks/init/celery_app.py | 5 +- augur/tasks/start_tasks.py | 14 +- augur/tasks/util/worker_util.py | 1 - augur/util/repo_load_controller.py | 429 +--------- .../test_cli/test_add_cli_repos.py | 140 ++++ .../test_models/test_augur_data/test_repo.py | 142 ++++ .../test_augur_data/test_repo_group.py | 49 ++ .../test_augur_operations/test_user.py | 173 ++++ .../test_augur_operations/test_user_group.py | 237 ++++++ .../test_augur_operations/test_user_repo.py | 323 ++++++++ .../test_repo_load_controller/helper.py | 8 +- .../test_adding_orgs.py | 137 +-- .../test_helper_functions.py | 782 ++++-------------- .../test_repo_load_controller.py | 521 ------------ 59 files changed, 2147 insertions(+), 2188 deletions(-) create mode 100644 tests/test_applicaton/test_cli/test_add_cli_repos.py create mode 100644 tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py create 
mode 100644 tests/test_applicaton/test_db/test_models/test_augur_data/test_repo_group.py create mode 100644 tests/test_applicaton/test_db/test_models/test_augur_operations/test_user.py create mode 100644 tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_group.py create mode 100644 tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_repo.py delete mode 100644 tests/test_applicaton/test_repo_load_controller/test_repo_load_controller.py diff --git a/augur/api/gunicorn_conf.py b/augur/api/gunicorn_conf.py index 34a5b394a6..0a98f08615 100644 --- a/augur/api/gunicorn_conf.py +++ b/augur/api/gunicorn_conf.py @@ -6,9 +6,13 @@ import shutil from augur.application.db.session import DatabaseSession +from augur.application.config import AugurConfig logger = logging.getLogger(__name__) with DatabaseSession(logger) as session: + + augur_config = AugurConfig(logger, session) + # ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) @@ -21,21 +25,21 @@ reload = True # set the log location for gunicorn - logs_directory = session.config.get_value('Logging', 'logs_directory') + logs_directory = augur_config.get_value('Logging', 'logs_directory') accesslog = f"{logs_directory}/gunicorn.log" errorlog = f"{logs_directory}/gunicorn.log" - ssl_bool = session.config.get_value('Server', 'ssl') + ssl_bool = augur_config.get_value('Server', 'ssl') if ssl_bool is True: - workers = int(session.config.get_value('Server', 'workers')) - bind = '%s:%s' % (session.config.get_value("Server", "host"), session.config.get_value("Server", "port")) - timeout = int(session.config.get_value('Server', 'timeout')) - certfile = str(session.config.get_value('Server', 'ssl_cert_file')) - keyfile = str(session.config.get_value('Server', 'ssl_key_file')) + workers = int(augur_config.get_value('Server', 'workers')) + bind = '%s:%s' % (augur_config.get_value("Server", "host"), augur_config.get_value("Server", "port")) + timeout = int(augur_config.get_value('Server', 'timeout')) + certfile = str(augur_config.get_value('Server', 'ssl_cert_file')) + keyfile = str(augur_config.get_value('Server', 'ssl_key_file')) else: - workers = int(session.config.get_value('Server', 'workers')) - bind = '%s:%s' % (session.config.get_value("Server", "host"), session.config.get_value("Server", "port")) - timeout = int(session.config.get_value('Server', 'timeout')) + workers = int(augur_config.get_value('Server', 'workers')) + bind = '%s:%s' % (augur_config.get_value("Server", "host"), augur_config.get_value("Server", "port")) + timeout = int(augur_config.get_value('Server', 'timeout')) diff --git a/augur/api/routes/config.py b/augur/api/routes/config.py index 08bb92d06b..968d0de417 100644 --- a/augur/api/routes/config.py +++ b/augur/api/routes/config.py @@ -12,6 +12,7 @@ # Disable the requirement for SSL by setting env["AUGUR_DEV"] = True from augur.application.config import get_development_flag from augur.application.db.models import Config +from augur.application.config import AugurConfig from augur.application.db.session import DatabaseSession logger = logging.getLogger(__name__) @@ -35,7 +36,7 @@ def get_config(): with DatabaseSession(logger) as session: - config_dict = session.config.load_config() + config_dict = AugurConfig(logger, session).config.load_config() return jsonify(config_dict), 200 diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 917b419f33..1d7c689166 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -101,6 +101,7 @@ def 
validate_user(): return jsonify({"status": "Validated"}) + @server.app.route(f"/{AUGUR_API_VERSION}/user/logout", methods=['POST']) @login_required def logout_user_func(): @@ -447,13 +448,7 @@ def get_user_groups(): @server.app.route(f"/{AUGUR_API_VERSION}/user/groups/repos/", methods=['GET', 'POST']) @login_required def get_user_groups_and_repos(): - """Get a list of user groups and their repos - - Returns - ------- - list - A list with this strucutre : [{"": Cache: server cache """ - expire = int(self.config.get_value('Server', 'cache_expire')) - server_cache = self.cache_manager.get_cache('server', expire=expire) - server_cache.clear() + with DatabaseSession(self.logger) as session: + config = AugurConfig(self.logger, session) + + expire = int(config.get_value('Server', 'cache_expire')) + server_cache = self.cache_manager.get_cache('server', expire=expire) + server_cache.clear() return server_cache diff --git a/augur/api/view/api.py b/augur/api/view/api.py index 2b9b2f5dd2..721c8164ef 100644 --- a/augur/api/view/api.py +++ b/augur/api/view/api.py @@ -1,6 +1,7 @@ from flask import Flask, render_template, render_template_string, request, abort, jsonify, redirect, url_for, session, flash from flask_login import current_user, login_required -from augur.util.repo_load_controller import parse_org_url, parse_repo_url +from augur.application.db.models import Repo +# from augur.util.repo_load_controller import parse_org_url, parse_repo_url from .utils import * def create_routes(server): @@ -22,10 +23,10 @@ def av_add_user_repo(): if not url or not group: flash("Repo or org URL must not be empty") - elif parse_org_url(url): + elif Repo.parse_github_org_url(url): current_user.add_org(group, url) flash("Successfully added org") - elif parse_repo_url(url): + elif Repo.parse_github_repo_url(url): current_user.add_repo(group, url) flash("Successfully added repo") else: diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 2600160396..0b66401612 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -13,8 +13,6 @@ logger = logging.getLogger(__name__) -with DatabaseSession(logger) as db_session: - config = AugurConfig(logger, db_session) # ROUTES ----------------------------------------------------------------------- @@ -71,8 +69,12 @@ def repo_table_view(): rev = True direction = "DESC" if rev else "ASC" - - pagination_offset = config.get_value("frontend", "pagination_offset") + + with DatabaseSession(logger) as db_session: + config = AugurConfig(logger, db_session) + + pagination_offset = config.get_value("frontend", "pagination_offset") + if current_user.is_authenticated: data = current_user.get_repos(page = page, sort = sorting, direction = direction)[0] @@ -153,26 +155,28 @@ def user_login(): if username is None: raise LoginException("A login issue occurred") - user = User.get_user(username) - if not user and register is None: - raise LoginException("Invalid login credentials") - - # register a user - if register is not None: - if user: - raise LoginException("User already exists") + with DatabaseSession(logger) as db_session: + user = User.get_user(db_session, username) + + if not user and register is None: + raise LoginException("Invalid login credentials") - email = request.form.get('email') - first_name = request.form.get('first_name') - last_name = request.form.get('last_name') - admin = request.form.get('admin') or False - - result = User.create_user(username, password, email, first_name, last_name, admin) - if not result[0]: - raise LoginException("An error 
occurred registering your account") - else: - user = User.get_user(username) - flash(result[1]["status"]) + # register a user + if register is not None: + if user: + raise LoginException("User already exists") + + email = request.form.get('email') + first_name = request.form.get('first_name') + last_name = request.form.get('last_name') + admin = request.form.get('admin') or False + + result = User.create_user(db_session, username, password, email, first_name, last_name, admin) + if not result[0]: + raise LoginException("An error occurred registering your account") + else: + user = User.get_user(username) + flash(result[1]["status"]) # Log the user in if the password is valid if user.validate(password) and login_user(user, remember = remember): @@ -285,7 +289,10 @@ def user_group_view(): rev = True params["direction"] = "DESC" - pagination_offset = config.get_value("frontend", "pagination_offset") + with DatabaseSession(logger) as db_session: + config = AugurConfig(logger, db_session) + + pagination_offset = config.get_value("frontend", "pagination_offset") data = current_user.get_group_repos(group, **params)[0] page_count = (current_user.get_group_repo_count(group)[0]) or 0 diff --git a/augur/application/cli/__init__.py b/augur/application/cli/__init__.py index 1f9fe1ef09..aaf548432a 100644 --- a/augur/application/cli/__init__.py +++ b/augur/application/cli/__init__.py @@ -19,7 +19,8 @@ def new_func(ctx, *args, **kwargs): #try to ping google's dns server socket.create_connection(("8.8.8.8",53)) return ctx.invoke(function_internet_connection, *args, **kwargs) - except OSError: + except OSError as e: + print(e) print(f"\n\n{usage} command setup failed\nYou are not connect to the internet. Please connect to the internet to run Augur\n") sys.exit() diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 3745dd6cf8..a80701e0c1 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -23,6 +23,7 @@ from augur.application.db.models import Repo from augur.application.db.session import DatabaseSession from augur.application.logs import AugurLogger +from augur.application.config import AugurConfig from augur.application.cli import test_connection, test_db_connection @@ -55,45 +56,50 @@ def start(disable_collection, development, port): os.environ["AUGUR_DEV"] = "1" logger.info("Starting in development mode") - - with DatabaseSession(logger) as session: - + try: gunicorn_location = os.getcwd() + "/augur/api/gunicorn_conf.py" - host = session.config.get_value("Server", "host") + except FileNotFoundError: + logger.error("\n\nPlease run augur commands in the root directory\n\n") - if not port: - port = session.config.get_value("Server", "port") + db_session = DatabaseSession(logger) + config = AugurConfig(logger, db_session) + host = config.get_value("Server", "host") - gunicorn_command = f"gunicorn -c {gunicorn_location} -b {host}:{port} --preload augur.api.server:app" - server = subprocess.Popen(gunicorn_command.split(" ")) + if not port: + port = config.get_value("Server", "port") + + db_session.invalidate() + + gunicorn_command = f"gunicorn -c {gunicorn_location} -b {host}:{port} --preload augur.api.server:app" + server = subprocess.Popen(gunicorn_command.split(" ")) - time.sleep(3) - logger.info('Gunicorn webserver started...') - logger.info(f'Augur is running at: http://127.0.0.1:{session.config.get_value("Server", "port")}') + time.sleep(3) + logger.info('Gunicorn webserver started...') + logger.info(f'Augur is running at: 
http://127.0.0.1:{port}') - worker_1_process = None - cpu_worker_process = None - celery_beat_process = None - if not disable_collection: + worker_1_process = None + cpu_worker_process = None + celery_beat_process = None + if not disable_collection: - if os.path.exists("celerybeat-schedule.db"): - logger.info("Deleting old task schedule") - os.remove("celerybeat-schedule.db") + if os.path.exists("celerybeat-schedule.db"): + logger.info("Deleting old task schedule") + os.remove("celerybeat-schedule.db") - worker_1 = f"celery -A augur.tasks.init.celery_app.celery_app worker -P eventlet -l info --concurrency=100 -n {uuid.uuid4().hex}@%h" - cpu_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h -Q cpu" - worker_1_process = subprocess.Popen(worker_1.split(" ")) + worker_1 = f"celery -A augur.tasks.init.celery_app.celery_app worker -P eventlet -l info --concurrency=100 -n {uuid.uuid4().hex}@%h" + cpu_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h -Q cpu" + worker_1_process = subprocess.Popen(worker_1.split(" ")) - cpu_worker_process = subprocess.Popen(cpu_worker.split(" ")) - time.sleep(5) + cpu_worker_process = subprocess.Popen(cpu_worker.split(" ")) + time.sleep(5) - start_task.si().apply_async() + start_task.si().apply_async() - celery_command = "celery -A augur.tasks.init.celery_app.celery_app beat -l debug" - celery_beat_process = subprocess.Popen(celery_command.split(" ")) + celery_command = "celery -A augur.tasks.init.celery_app.celery_app beat -l debug" + celery_beat_process = subprocess.Popen(celery_command.split(" ")) - else: - logger.info("Collection disabled") + else: + logger.info("Collection disabled") try: server.wait() @@ -119,7 +125,8 @@ def start(disable_collection, development, port): clear_redis_caches() connection_string = "" with DatabaseSession(logger) as session: - connection_string = session.config.get_section("RabbitMQ")['connection_string'] + config = AugurConfig(logger, session) + connection_string = config.get_section("RabbitMQ")['connection_string'] clear_rabbitmq_messages(connection_string) @@ -138,7 +145,8 @@ def stop(): clear_redis_caches() connection_string = "" with DatabaseSession(logger) as session: - connection_string = session.config.get_section("RabbitMQ")['connection_string'] + config = AugurConfig(logger, session) + connection_string = config.get_section("RabbitMQ")['connection_string'] clear_rabbitmq_messages(connection_string) @@ -154,7 +162,8 @@ def kill(): connection_string = "" with DatabaseSession(logger) as session: - connection_string = session.config.get_section("RabbitMQ")['connection_string'] + config = AugurConfig(logger, session) + connection_string = config.get_section("RabbitMQ")['connection_string'] clear_rabbitmq_messages(connection_string) diff --git a/augur/application/cli/config.py b/augur/application/cli/config.py index 3d93d363d7..dad4db4bd3 100644 --- a/augur/application/cli/config.py +++ b/augur/application/cli/config.py @@ -10,6 +10,7 @@ from augur.application.db.models import Config from augur.application.db.session import DatabaseSession from augur.application.logs import AugurLogger +from augur.application.config import AugurConfig from augur.application.cli import test_connection, test_db_connection from augur.util.inspect_without_import import get_phase_names_without_import ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) 
@@ -62,11 +63,10 @@ def init_config(github_api_key, facade_repo_directory, gitlab_api_key, redis_con with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) default_config = config.default_config - print(f"Dir {os.getcwd()}") phase_names = get_phase_names_without_import() #Add all phases as enabled by default @@ -109,7 +109,7 @@ def init_config(github_api_key, facade_repo_directory, gitlab_api_key, redis_con def load_config(file): with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) print("WARNING: This will override your current config") response = str(input("Would you like to continue: [y/N]: ")).lower() @@ -132,7 +132,7 @@ def load_config(file): def add_section(section_name, file): with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) if config.is_section_in_config(section_name): @@ -161,7 +161,7 @@ def add_section(section_name, file): def config_set(section, setting, value, data_type): with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) if data_type not in config.accepted_types: print(f"Error invalid type for config. Please use one of these types: {config.accepted_types}") @@ -185,7 +185,7 @@ def config_set(section, setting, value, data_type): def config_get(section, setting): with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) if setting: config_value = config.get_value(section_name=section, setting_name=setting) @@ -215,7 +215,7 @@ def config_get(section, setting): def clear_config(): with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) if not config.empty(): diff --git a/augur/application/config.py b/augur/application/config.py index fb97ba31c2..7cc9c01ebc 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -18,7 +18,9 @@ def get_development_flag_from_config(): section = "Augur" setting = "developer" - return config.get_value(section, setting) + flag = config.get_value(section, setting) + + return flag def get_development_flag(): return os.getenv("AUGUR_DEV") or get_development_flag_from_config() or False diff --git a/augur/application/db/data_parse.py b/augur/application/db/data_parse.py index 1b594f6250..86caa544b6 100644 --- a/augur/application/db/data_parse.py +++ b/augur/application/db/data_parse.py @@ -180,8 +180,6 @@ def extract_pr_review_message_ref_data(comment: dict, pr_review_id, repo_id: int def extract_pr_event_data(event: dict, pr_id: int, platform_id: int, repo_id: int, tool_source: str, tool_version: str, data_source: str) -> dict: - # TODO: Add db pull request id - pr_event = { 'pull_request_id': pr_id, 'cntrb_id': event["cntrb_id"] if "cntrb_id" in event else None, @@ -204,8 +202,6 @@ def extract_pr_event_data(event: dict, pr_id: int, platform_id: int, repo_id: in def extract_issue_event_data(event: dict, issue_id: int, platform_id: int, repo_id: int, tool_source: str, tool_version: str, data_source: str) -> dict: - # TODO: Add db pull request id - issue_event = { 'issue_event_src_id': int(event['id']), 'issue_id': issue_id, diff --git a/augur/application/db/engine.py b/augur/application/db/engine.py index e734224348..9d2a2621fc 100644 --- a/augur/application/db/engine.py +++ b/augur/application/db/engine.py @@ -7,13 +7,9 @@ from sqlalchemy import create_engine, event from sqlalchemy.engine.base import Engine from 
sqlalchemy.pool import NullPool -from augur.application.logs import initialize_stream_handler from augur.application.db.util import catch_operational_error -logger = logging.getLogger("engine") -initialize_stream_handler(logger, logging.ERROR) - def get_database_string() -> str: """Get database string from env or file @@ -29,13 +25,18 @@ def get_database_string() -> str: augur_db_environment_var = os.getenv("AUGUR_DB") - current_dir = os.getcwd() + try: + current_dir = os.getcwd() + except FileNotFoundError: + print("\n\nPlease run augur commands in the root directory\n\n") + sys.exit() + db_json_file_location = current_dir + "/db.config.json" db_json_exists = os.path.exists(db_json_file_location) if not augur_db_environment_var and not db_json_exists: - logger.error("ERROR no way to get connection to the database. \n\t\t\t\t\t\t There is no db.config.json and the AUGUR_DB environment variable is not set\n\t\t\t\t\t\t Please run make install or set the AUGUR_DB environment then run make install") + print("ERROR no way to get connection to the database. \n\t\t\t\t\t\t There is no db.config.json and the AUGUR_DB environment variable is not set\n\t\t\t\t\t\t Please run make install or set the AUGUR_DB environment then run make install") sys.exit() if augur_db_environment_var: diff --git a/augur/application/db/models/__init__.py b/augur/application/db/models/__init__.py index 3d9277fac2..9721a8f4bb 100644 --- a/augur/application/db/models/__init__.py +++ b/augur/application/db/models/__init__.py @@ -107,3 +107,5 @@ SubscriptionType, RefreshToken ) + +DEFAULT_REPO_GROUP_IDS = [1, 10] diff --git a/augur/application/db/models/augur_data.py b/augur/application/db/models/augur_data.py index 9f7bdba0ec..12bc510e5a 100644 --- a/augur/application/db/models/augur_data.py +++ b/augur/application/db/models/augur_data.py @@ -21,24 +21,21 @@ from sqlalchemy.dialects.postgresql import JSONB, TIMESTAMP, UUID from sqlalchemy.orm import relationship from sqlalchemy.sql import text +from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound import logging +import re +from typing import List, Any, Dict + from augur.application.db.models.base import Base from augur.application import requires_db_session +from augur.application.db.util import execute_session_query +DEFAULT_REPO_GROUP_ID = 1 metadata = Base.metadata logger = logging.getLogger(__name__) -def get_session(): - global session - - if "session" not in globals(): - from augur.application.db.session import DatabaseSession - session = DatabaseSession(logger) - - return session - t_analysis_log = Table( "analysis_log", @@ -559,6 +556,26 @@ class RepoGroup(Base): data_source = Column(String) data_collection_date = Column(TIMESTAMP(precision=0)) + @staticmethod + def is_valid_repo_group_id(session, repo_group_id: int) -> bool: + """Deterime is repo_group_id exists. 
+ + Args: + repo_group_id: id from the repo groups table + + Returns: + True if it exists, False if it does not + """ + + query = session.query(RepoGroup).filter(RepoGroup.repo_group_id == repo_group_id) + + try: + result = execute_session_query(query, 'one') + except (NoResultFound, MultipleResultsFound): + return False + + return True + t_repos_fetch_log = Table( "repos_fetch_log", @@ -830,11 +847,148 @@ class Repo(Base): user_repo = relationship("UserRepo") @staticmethod - def get_by_id(repo_id): + def get_by_id(session, repo_id): + + return session.query(Repo).filter(Repo.repo_id == repo_id).first() + + @staticmethod + def is_valid_github_repo(session, url: str) -> bool: + """Determine whether repo url is valid. + + Args: + url: repo_url + + Returns + True if repo url is valid and False if not + """ + from augur.tasks.github.util.github_paginator import hit_api + + REPO_ENDPOINT = "https://api.github.com/repos/{}/{}" + + if not session.oauths.list_of_keys: + return False, {"status": "No valid github api keys to retrieve data with"} + + owner, repo = Repo.parse_github_repo_url(url) + if not owner or not repo: + return False, {"status":"Invalid repo url"} + + url = REPO_ENDPOINT.format(owner, repo) + + attempts = 0 + while attempts < 10: + result = hit_api(session.oauths, url, logger) + + # if result is None try again + if not result: + attempts+=1 + continue + + data = result.json() + # if there was an error return False + if "message" in data.keys(): + + if data["message"] == "Not Found": + return False, {"status": "Invalid repo"} + + return False, {"status": f"Github Error: {data['message']}"} + + return True, {"status": "Valid repo"} + + @staticmethod + def parse_github_repo_url(url: str) -> tuple: + """ Gets the owner and repo from a url. + + Args: + url: Github url + + Returns: + Tuple of owner and repo. Or a tuple of None and None if the url is invalid. + """ + + if url.endswith(".github") or url.endswith(".github.io") or url.endswith(".js"): + + result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- _ \.]+)(.git)?\/?$", url) + else: + + result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/([A-Za-z0-9 \- _]+)(.git)?\/?$", url) + + if not result: + return None, None + + capturing_groups = result.groups() + + + owner = capturing_groups[0] + repo = capturing_groups[1] + + return owner, repo + + @staticmethod + def parse_github_org_url(url): + """ Gets the owner from a org url. + + Args: + url: Github org url + + Returns: + Org name. Or None if the url is invalid. + """ + + result = re.search(r"https?:\/\/github\.com\/([A-Za-z0-9 \- _]+)\/?$", url) + + if not result: + return None + + # if the result is not None then the groups should be valid so we don't worry about index errors here + return result.groups()[0] + + @staticmethod + def insert(session, url: str, repo_group_id: int, tool_source): + """Add a repo to the repo table. + + Args: + url: repo url + repo_group_id: group to assign repo to + + Note: + If repo row exists then it will update the repo_group_id if param repo_group_id is not a default. If it does not exist is will simply insert the repo. 
+ """ + + if not isinstance(url, str) or not isinstance(repo_group_id, int) or not isinstance(tool_source, str): + return None + + if not RepoGroup.is_valid_repo_group_id(session, repo_group_id): + return None + + repo_data = { + "repo_group_id": repo_group_id, + "repo_git": url, + "repo_status": "New", + "tool_source": tool_source, + "tool_version": "1.0", + "data_source": "Git" + } + + repo_unique = ["repo_git"] + return_columns = ["repo_id"] + result = session.insert_data(repo_data, Repo, repo_unique, return_columns, on_conflict_update=False) + + if not result: + return None + + if repo_group_id != DEFAULT_REPO_GROUP_ID: + # update the repo group id + query = session.query(Repo).filter(Repo.repo_git == url) + repo = execute_session_query(query, 'one') + + if not repo.repo_group_id == repo_group_id: + repo.repo_group_id = repo_group_id + session.commit() + + return result[0]["repo_id"] + - local_session = get_session() - return local_session.query(Repo).filter(Repo.repo_id == repo_id).first() class RepoTestCoverage(Base): __tablename__ = "repo_test_coverage" diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 78e2e32865..75e5a59e6c 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -2,23 +2,59 @@ from sqlalchemy import BigInteger, SmallInteger, Column, Index, Integer, String, Table, text, UniqueConstraint, Boolean, ForeignKey from sqlalchemy.dialects.postgresql import TIMESTAMP, UUID from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import relationship from werkzeug.security import generate_password_hash, check_password_hash +from typing import List, Any, Dict + import logging import secrets +from augur.application.db.models import Repo +from augur.application.db.session import DatabaseSession from augur.application.db.models.base import Base +DEFAULT_REPO_GROUP_ID = 1 logger = logging.getLogger(__name__) -def get_session(): - global session +def retrieve_org_repos(session, url: str) -> List[str]: + """Get the repos for an org. 
+ + Note: + If the org url is not valid it will return [] + + Args: + url: org url + + Returns + List of valid repo urls or empty list if invalid org + """ + from augur.tasks.github.util.github_paginator import GithubPaginator + + ORG_REPOS_ENDPOINT = "https://api.github.com/orgs/{}/repos?per_page=100" + + owner = Repo.parse_github_org_url(url) + if not owner: + return None, {"status": "Invalid owner url"} + + url = ORG_REPOS_ENDPOINT.format(owner) + + repos = [] + + if not session.oauths.list_of_keys: + return None, {"status": "No valid github api keys to retrieve data with"} + + for page_data, page in GithubPaginator(url, session.oauths, logger).iter_pages(): + + if page_data is None: + break - if "session" not in globals(): - from augur.application.db.session import DatabaseSession - session = DatabaseSession(logger) + repos.extend(page_data) + + repo_urls = [repo["html_url"] for repo in repos] + + return repo_urls, {"status": "Invalid owner url"} - return session metadata = Base.metadata @@ -239,8 +275,8 @@ def is_anoymous(self, val): self._is_anoymous = val @staticmethod - def exists(username): - return User.get_user(username) is not None + def exists(session, username): + return User.get_user(session, username) is not None def get_id(self): return self.login_name @@ -254,15 +290,13 @@ def validate(self, password) -> bool: return result @staticmethod - def get_user(username: str): + def get_user(session, username: str): - if not username: + if not isinstance(username, str): return None - local_session = get_session() - try: - user = local_session.query(User).filter(User.login_name == username).one() + user = session.query(User).filter(User.login_name == username).one() return user except NoResultFound: return None @@ -273,175 +307,132 @@ def create_user(username: str, password: str, email: str, first_name:str, last_n if username is None or password is None or email is None or first_name is None or last_name is None: return False, {"status": "Missing field"} - local_session = get_session() - - user = local_session.query(User).filter(User.login_name == username).first() - if user is not None: - return False, {"status": "A User already exists with that username"} + with DatabaseSession(logger) as session: - emailCheck = local_session.query(User).filter(User.email == email).first() - if emailCheck is not None: - return False, {"status": "A User already exists with that email"} + user = session.query(User).filter(User.login_name == username).first() + if user is not None: + return False, {"status": "A User already exists with that username"} - try: - user = User(login_name = username, login_hashword = generate_password_hash(password), email = email, first_name = first_name, last_name = last_name, tool_source="User API", tool_version=None, data_source="API", admin=admin) - local_session.add(user) - local_session.commit() + emailCheck = session.query(User).filter(User.email == email).first() + if emailCheck is not None: + return False, {"status": "A User already exists with that email"} - result = user.add_group("default") - if not result[0] and result[1]["status"] != "Group already exists": - return False, {"status": "Failed to add default group for the user"} + try: + user = User(login_name = username, login_hashword = generate_password_hash(password), email = email, first_name = first_name, last_name = last_name, tool_source="User API", tool_version=None, data_source="API", admin=admin) + session.add(user) + session.commit() - return True, {"status": "Account successfully created"} - 
except AssertionError as exception_message: - return False, {"Error": f"{exception_message}."} + result = user.add_group(session, "default") + if not result[0] and result[1]["status"] != "Group already exists": + return False, {"status": "Failed to add default group for the user"} - def delete(self): + return True, {"status": "Account successfully created"} + except AssertionError as exception_message: + return False, {"Error": f"{exception_message}."} - local_session = get_session() + def delete(self, session): for group in self.groups: user_repos_list = group.repos for user_repo_entry in user_repos_list: - local_session.delete(user_repo_entry) + session.delete(user_repo_entry) - local_session.delete(group) + session.delete(group) - local_session.delete(self) - local_session.commit() + session.delete(self) + session.commit() return True, {"status": "User deleted"} - def update_password(self, old_password, new_password): + def update_password(self, session, old_password, new_password): - local_session = get_session() + if not isinstance(old_password, str): + return False, {"status": f"Invalid type {type(old_password)} passed as old_password should be type string"} - if not old_password or not new_password: - print("Need old and new password to update the password") - return False, {"status": "Need old and new password to update the password"} + if not isinstance(new_password, str): + return False, {"status": f"Invalid type {type(new_password)} passed as old_password should be type string"} if not check_password_hash(self.login_hashword, old_password): - print("Password did not match the users password, unable to update password") return False, {"status": "Password did not match users password"} self.login_hashword = generate_password_hash(new_password) - local_session.commit() - # print("Password Updated") + session.commit() return True, {"status": "Password updated"} - def update_email(self, new_email): - - local_session = get_session() + def update_email(self, session, new_email): if not new_email: print("Need new email to update the email") return False, {"status": "Missing argument"} + - existing_user = local_session.query(User).filter(User.email == new_email).first() + existing_user = session.query(User).filter(User.email == new_email).first() if existing_user is not None: print("Func: update_user. Error: Already an account with this email") return False, {"status": "There is already an account with this email"} self.email = new_email - local_session.commit() - # print("Email Updated") - return True, {"status": "Email updated"} + session.commit() - def update_username(self, new_username): + return True, {"status": "Email updated"} - local_session = get_session() + def update_username(self, session, new_username): if not new_username: print("Need new username to update the username") return False, {"status": "Missing argument"} - existing_user = local_session.query(User).filter(User.login_name == new_username).first() + existing_user = session.query(User).filter(User.login_name == new_username).first() if existing_user is not None: print("Func: update_user. 
Error: Already an account with this username") return False, {"status": "Username already taken"} self.login_name = new_username - local_session.commit() - # print("Username Updated") + session.commit() + return True, {"status": "Username updated"} def add_group(self, group_name): - from augur.util.repo_load_controller import RepoLoadController - - local_session = get_session() - - repo_load_controller = RepoLoadController(gh_session=local_session) - - result = repo_load_controller.add_user_group(self.user_id, group_name) + with DatabaseSession(logger) as session: + result = UserGroup.insert(session, self.user_id, group_name) return result def remove_group(self, group_name): - from augur.util.repo_load_controller import RepoLoadController - - local_session = get_session() - - repo_load_controller = RepoLoadController(gh_session=local_session) - - result = repo_load_controller.remove_user_group(self.user_id, group_name) + with DatabaseSession(logger) as session: + result = UserGroup.delete(session, self.user_id, group_name) return result def add_repo(self, group_name, repo_url): - - from augur.tasks.github.util.github_task_session import GithubTaskSession - from augur.util.repo_load_controller import RepoLoadController - - with GithubTaskSession(logger) as session: - - repo_load_controller = RepoLoadController(gh_session=session) - - result = repo_load_controller.add_frontend_repo(repo_url, self.user_id, group_name) - return result - - def remove_repo(self, group_name, repo_id): - - from augur.util.repo_load_controller import RepoLoadController + with DatabaseSession(logger) as session: + result = UserRepo.add(session, repo_url, self.user_id, group_name) - local_session = get_session() + return result - repo_load_controller = RepoLoadController(gh_session=local_session) + def remove_repo(self, session, group_name, repo_id): - result = repo_load_controller.remove_frontend_repo(repo_id, self.user_id, group_name) - print(result) + with DatabaseSession(logger) as session: + result = UserRepo.delete(session, repo_id, self.user_id, group_name) return result def add_org(self, group_name, org_url): - - from augur.tasks.github.util.github_task_session import GithubTaskSession - from augur.util.repo_load_controller import RepoLoadController - - with GithubTaskSession(logger) as session: - repo_load_controller = RepoLoadController(gh_session=session) - - result = repo_load_controller.add_frontend_org(org_url, self.user_id, group_name) + with DatabaseSession(logger) as session: + result = UserRepo.add_org_repos(session, org_url, self.user_id, group_name) - return result + return result def get_groups(self): - - from augur.util.repo_load_controller import RepoLoadController - - local_session = get_session() - - controller = RepoLoadController(local_session) - - user_groups = controller.get_user_groups(self.user_id) - return user_groups, {"status": "success"} + return self.groups, {"status": "success"} def get_group_names(self): @@ -456,45 +447,34 @@ def get_repos(self, page=0, page_size=25, sort="repo_id", direction="ASC"): from augur.util.repo_load_controller import RepoLoadController - local_session = get_session() - - result = RepoLoadController(local_session).paginate_repos("user", page, page_size, sort, direction, user=self) + with DatabaseSession(logger) as session: + result = RepoLoadController(session).paginate_repos("user", page, page_size, sort, direction, user=self) return result def get_repo_count(self): - from augur.util.repo_load_controller import RepoLoadController - local_session = 
get_session() - - controller = RepoLoadController(local_session) - - result = controller.get_repo_count(source="user", user=self) + with DatabaseSession(logger) as session: + result = RepoLoadController(session).get_repo_count(source="user", user=self) return result def get_group_repos(self, group_name, page=0, page_size=25, sort="repo_id", direction="ASC"): - from augur.util.repo_load_controller import RepoLoadController - local_session = get_session() - - print("Get group repos") - - result = RepoLoadController(local_session).paginate_repos("group", page, page_size, sort, direction, user=self, group_name=group_name) + with DatabaseSession(logger) as session: + result = RepoLoadController(session).paginate_repos("group", page, page_size, sort, direction, user=self, group_name=group_name) return result def get_group_repo_count(self, group_name): - from augur.util.repo_load_controller import RepoLoadController - local_session = get_session() - - controller = RepoLoadController(local_session) + with DatabaseSession(logger) as session: + controller = RepoLoadController(session) result = controller.get_repo_count(source="group", group_name=group_name, user=self) @@ -502,32 +482,23 @@ def get_group_repo_count(self, group_name): def invalidate_session(self, token): - from augur.application.db.session import DatabaseSession - with DatabaseSession(logger) as session: - - row_count = session.query(UserSessionToken).filter(UserSessionToken.user_id == self.user_id, UserSessionToken.token == token).delete() - session.commit() + row_count = session.query(UserSessionToken).filter(UserSessionToken.user_id == self.user_id, UserSessionToken.token == token).delete() + session.commit() return row_count == 1 def delete_app(self, app_id): - from augur.application.db.session import DatabaseSession - with DatabaseSession(logger) as session: - - row_count = session.query(ClientApplication).filter(ClientApplication.user_id == self.user_id, ClientApplication.id == app_id).delete() - session.commit() + row_count = session.query(ClientApplication).filter(ClientApplication.user_id == self.user_id, ClientApplication.id == app_id).delete() + session.commit() return row_count == 1 def add_app(self, name, redirect_url): - from augur.application.db.session import DatabaseSession - with DatabaseSession(logger) as session: - try: app = ClientApplication(id=secrets.token_hex(16), api_key=secrets.token_hex(), name=name, redirect_url=redirect_url, user_id=self.user_id) session.add(app) @@ -540,24 +511,21 @@ def add_app(self, name, redirect_url): def toggle_group_favorite(self, group_name): - local_session = get_session() - - group = local_session.query(UserGroup).filter(UserGroup.name == group_name, UserGroup.user_id == self.user_id).first() - if not group: - return False, {"status": "Group does not exist"} + with DatabaseSession(logger) as session: + group = session.query(UserGroup).filter(UserGroup.name == group_name, UserGroup.user_id == self.user_id).first() + if not group: + return False, {"status": "Group does not exist"} - group.favorited = not group.favorited + group.favorited = not group.favorited - local_session.commit() + session.commit() return True, {"status": "Success"} - def get_favorite_groups(self): - - local_session = get_session() + def get_favorite_groups(self, session): try: - groups = local_session.query(UserGroup).filter(UserGroup.user_id == self.user_id, UserGroup.favorited == True).all() + groups = session.query(UserGroup).filter(UserGroup.user_id == self.user_id, UserGroup.favorited == True).all() 
except Exception as e: print(f"Error while trying to get favorite groups: {e}") return None, {"status": "Error when trying to get favorite groups"} @@ -582,6 +550,97 @@ class UserGroup(Base): user = relationship("User") repos = relationship("UserRepo") + @staticmethod + def insert(session, user_id:int, group_name:str) -> dict: + """Add a group to the user. + + Args + user_id: id of the user + group_name: name of the group being added + + Returns: + Dict with status key that indicates the success of the operation + + Note: + If group already exists the function will return that it has been added, but a duplicate group isn't added. + It simply detects that it already exists and doesn't add it. + """ + + if not isinstance(user_id, int) or not isinstance(group_name, str): + return False, {"status": "Invalid input"} + + user_group_data = { + "name": group_name, + "user_id": user_id + } + + user_group = session.query(UserGroup).filter(UserGroup.user_id == user_id, UserGroup.name == group_name).first() + if user_group: + return False, {"status": "Group already exists"} + + try: + result = session.insert_data(user_group_data, UserGroup, ["name", "user_id"], return_columns=["group_id"]) + except IntegrityError: + return False, {"status": "Error: User id does not exist"} + + + if result: + return True, {"status": "Group created"} + + + return False, {"status": "Error while creating group"} + + @staticmethod + def delete(session, user_id: int, group_name: str) -> dict: + """ Delete a users group of repos. + + Args: + user_id: id of the user + group_name: name of the users group + + Returns: + Dict with a status key that indicates the result of the operation + + """ + + group = session.query(UserGroup).filter(UserGroup.name == group_name, UserGroup.user_id == user_id).first() + if not group: + return False, {"status": "WARNING: Trying to delete group that does not exist"} + + # delete rows from user repos with group_id + for repo in group.repos: + session.delete(repo) + + # delete group from user groups table + session.delete(group) + + session.commit() + + return True, {"status": "Group deleted"} + + @staticmethod + def convert_group_name_to_id(session, user_id: int, group_name: str) -> int: + """Convert a users group name to the database group id. + + Args: + user_id: id of the user + group_name: name of the users group + + Returns: + None on failure. The group id on success. + + """ + + if not isinstance(user_id, int) or not isinstance(group_name, str): + return None + + try: + user_group = session.query(UserGroup).filter(UserGroup.user_id == user_id, UserGroup.name == group_name).one() + except NoResultFound: + return None + + return user_group.group_id + class UserRepo(Base): @@ -602,6 +661,141 @@ class UserRepo(Base): repo = relationship("Repo") group = relationship("UserGroup") + @staticmethod + def insert(session, repo_id: int, group_id:int = 1) -> bool: + """Add a repo to a user in the user_repos table. 
+ + Args: + repo_id: id of repo from repo table + user_id: id of user_id from users table + """ + + if not isinstance(repo_id, int) or not isinstance(group_id, int): + return False + + repo_user_group_data = { + "group_id": group_id, + "repo_id": repo_id + } + + + repo_user_group_unique = ["group_id", "repo_id"] + return_columns = ["group_id", "repo_id"] + + try: + data = session.insert_data(repo_user_group_data, UserRepo, repo_user_group_unique, return_columns) + except IntegrityError: + return False + + return data[0]["group_id"] == group_id and data[0]["repo_id"] == repo_id + + @staticmethod + def add(session, url: List[str], user_id: int, group_name=None, group_id=None, valid_repo=False) -> dict: + """Add repo to the user repo table + + Args: + urls: list of repo urls + user_id: id of user_id from users table + group_name: name of group to add repo to. + group_id: id of the group + valid_repo: boolean that indicates whether the repo has already been validated + + Note: + Either the group_name or group_id can be passed not both + + Returns: + Dict that contains the key "status" and additional useful data + """ + + if group_name and group_id: + return False, {"status": "Pass only the group name or group id not both"} + + if not group_name and not group_id: + return False, {"status": "Need group name or group id to add a repo"} + + if group_id is None: + + group_id = UserGroup.convert_group_name_to_id(session, user_id, group_name) + if group_id is None: + return False, {"status": "Invalid group name"} + + if not valid_repo: + result = Repo.is_valid_github_repo(session, url) + if not result[0]: + return False, {"status": result[1]["status"], "repo_url": url} + + repo_id = Repo.insert(session, url, DEFAULT_REPO_GROUP_ID, "Frontend") + if not repo_id: + return False, {"status": "Repo insertion failed", "repo_url": url} + + result = UserRepo.insert(session, repo_id, group_id) + + if not result: + return False, {"status": "repo_user insertion failed", "repo_url": url} + + return True, {"status": "Repo Added", "repo_url": url} + + @staticmethod + def delete(session, repo_id:int, user_id:int, group_name:str) -> dict: + """ Remove repo from a users group. + + Args: + repo_id: id of the repo to remove + user_id: id of the user + group_name: name of group the repo is being removed from + + Returns: + Dict with a key of status that indicates the result of the operation + """ + + if not isinstance(repo_id, int) or not isinstance(user_id, int) or not isinstance(group_name, str): + return False, {"status": "Invalid types"} + + group_id = UserGroup.convert_group_name_to_id(session, user_id, group_name) + if group_id is None: + return False, {"status": "Invalid group name"} + + # delete rows from user repos with group_id + session.query(UserRepo).filter(UserRepo.group_id == group_id, UserRepo.repo_id == repo_id).delete() + session.commit() + + return True, {"status": "Repo Removed"} + + @staticmethod + def add_org_repos(session, url: List[str], user_id: int, group_name: int): + """Add list of orgs and their repos to a users repos. 
+ + Args: + urls: list of org urls + user_id: id of user_id from users table + """ + group_id = UserGroup.convert_group_name_to_id(session, user_id, group_name) + if group_id is None: + return False, {"status": "Invalid group name"} + + result = retrieve_org_repos(session, url) + if not result[0]: + return False, result[1] + + repos = result[0] + # try to get the repo group with this org name + # if it does not exist create one + failed_repos = [] + for repo in repos: + + result = UserRepo.add(session, repo, user_id, group_id=group_id, valid_repo=True) + + # keep track of all the repos that failed + if not result[0]: + failed_repos.append(repo) + + failed_count = len(failed_repos) + if failed_count > 0: + # this should never happen because an org should never return invalid repos + return False, {"status": f"{failed_count} repos failed", "repo_urls": failed_repos, "org_url": url} + + return True, {"status": "Org repos added"} + class UserSessionToken(Base): __tablename__ = "user_session_tokens" __table_args__ = ( @@ -621,17 +815,16 @@ class UserSessionToken(Base): refresh_tokens = relationship("RefreshToken") @staticmethod - def create(user_id, application_id, seconds_to_expire=86400): + def create(session, user_id, application_id, seconds_to_expire=86400): import time user_session_token = secrets.token_hex() expiration = int(time.time()) + seconds_to_expire - local_session = get_session() user_session = UserSessionToken(token=user_session_token, user_id=user_id, application_id = application_id, expiration=expiration) - local_session.add(user_session) - local_session.commit() + session.add(user_session) + session.commit() return user_session @@ -664,11 +857,9 @@ class ClientApplication(Base): subscriptions = relationship("Subscription") @staticmethod - def get_by_id(client_id): + def get_by_id(session, client_id): - local_session = get_session() - - return local_session.query(ClientApplication).filter(ClientApplication.id == client_id).first() + return session.query(ClientApplication).filter(ClientApplication.id == client_id).first() class Subscription(Base): @@ -712,15 +903,14 @@ class RefreshToken(Base): user_session = relationship("UserSessionToken") @staticmethod - def create(user_session_token_id): + def create(session, user_session_token_id): refresh_token_id = secrets.token_hex() - local_session = get_session() refresh_token = RefreshToken(id=refresh_token_id, user_session_token=user_session_token_id) - local_session.add(refresh_token) - local_session.commit() + session.add(refresh_token) + session.commit() return refresh_token diff --git a/augur/application/db/session.py b/augur/application/db/session.py index 1d4901b263..edaa16a70a 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -5,14 +5,14 @@ import random import logging import json -import sqlalchemy as s +from sqlalchemy.orm import Session +from sqlalchemy.dialects import postgresql +from sqlalchemy.exc import OperationalError from typing import Optional, List, Union from psycopg2.errors import DeadlockDetected # from augur.tasks.util.random_key_auth import RandomKeyAuth -from augur.application.config import AugurConfig -from augur.application.db.models import Platform from augur.application.db.engine import EngineConnection from augur.tasks.util.worker_util import remove_duplicate_dicts, remove_duplicates_by_uniques @@ -49,13 +49,11 @@ def remove_null_characters_from_list_of_dicts(data_list, fields): return data_list -class DatabaseSession(s.orm.Session): +class DatabaseSession(Session): 
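For orientation on the change above: `UserSessionToken.create`, `ClientApplication.get_by_id`, and `RefreshToken.create` now take an explicit `session` argument instead of each opening its own connection through `get_session()`. A minimal sketch of what a call site could look like under that assumption (the caller itself is not part of this patch, and the model import path is assumed):

```python
import logging

from augur.application.db.models import ClientApplication, RefreshToken, UserSessionToken  # import path assumed
from augur.application.db.session import DatabaseSession
from augur.tasks.init.celery_app import engine

logger = logging.getLogger(__name__)

def issue_tokens(user_id: int, client_id: str):
    # Hypothetical call site: one session is shared across the whole token
    # exchange instead of each helper opening and committing its own connection.
    with DatabaseSession(logger, engine) as session:
        application = ClientApplication.get_by_id(session, client_id)
        user_session = UserSessionToken.create(session, user_id, application.id)
        refresh_token = RefreshToken.create(session, user_session.token)
        return user_session, refresh_token
```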
def __init__(self, logger, engine=None): self.logger = logger - self.config = AugurConfig(logger=logger, session=self) - self.engine = engine self.engine_created = False @@ -135,7 +133,7 @@ def insert_data(self, data: Union[List[dict], dict], table, natural_keys: List[s # that returns cols specificed in returning_args # and inserts the data specified in data # NOTE: if return_columns does not have an values this still works - stmnt = s.dialects.postgresql.insert(table).returning(*returning_args).values(data) + stmnt = postgresql.insert(table).returning(*returning_args).values(data) if on_conflict_update: @@ -159,7 +157,7 @@ def insert_data(self, data: Union[List[dict], dict], table, natural_keys: List[s ) - # print(str(stmnt.compile(dialect=s.dialects.postgresql.dialect()))) + # print(str(stmnt.compile(dialect=postgresql.dialect()))) attempts = 0 # creates list from 1 to 10 sleep_time_list = list(range(1,11)) @@ -174,7 +172,7 @@ def insert_data(self, data: Union[List[dict], dict], table, natural_keys: List[s with EngineConnection(self.engine) as connection: connection.execute(stmnt) break - except s.exc.OperationalError as e: + except OperationalError as e: # print(str(e).split("Process")[1].split(";")[0]) if isinstance(e.orig, DeadlockDetected): deadlock_detected = True @@ -213,7 +211,7 @@ def insert_data(self, data: Union[List[dict], dict], table, natural_keys: List[s with EngineConnection(self.engine) as connection: return_data_tuples = connection.execute(stmnt).fetchall() break - except s.exc.OperationalError as e: + except OperationalError as e: if isinstance(e.orig, DeadlockDetected): sleep_time = random.choice(sleep_time_list) self.logger.debug(f"Deadlock detected on {table.__table__} table...trying again in {round(sleep_time)} seconds: transaction size: {len(data)}") diff --git a/augur/application/db/util.py b/augur/application/db/util.py index 544a355ae2..153b3e2deb 100644 --- a/augur/application/db/util.py +++ b/augur/application/db/util.py @@ -6,6 +6,7 @@ def catch_operational_error(func): attempts = 0 + error = None while attempts < 4: # do the sleep here instead of instead of in the exception @@ -16,10 +17,11 @@ def catch_operational_error(func): return func() except OperationalError as e: print(f"ERROR: {e}") + error = str(e) attempts += 1 - raise Exception("Unable to Resolve Operational Error") + raise Exception(error) def execute_session_query(query, query_type="all"): diff --git a/augur/application/util.py b/augur/application/util.py index 2d606804a6..9478ab5a02 100644 --- a/augur/application/util.py +++ b/augur/application/util.py @@ -1,13 +1,13 @@ import logging -from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.util.repo_load_controller import RepoLoadController logger = logging.getLogger(__name__) def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): - with GithubTaskSession(logger) as session: + with DatabaseSession(logger) as session: controller = RepoLoadController(session) @@ -17,7 +17,7 @@ def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): def get_all_repos_count(): - with GithubTaskSession(logger) as session: + with DatabaseSession(logger) as session: controller = RepoLoadController(session) diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index 36c4e5f08f..649fc81cfb 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ 
b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -19,8 +19,9 @@ from textblob import TextBlob from collections import Counter -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession +from augur.application.config import AugurConfig from augur.application.db.models import Repo, RepoClusterMessage, RepoTopic, TopicWord from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query @@ -49,15 +50,17 @@ def clustering_model(repo_git: str) -> None: tool_version = '0.2.0' data_source = 'Augur Collected Messages' - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: + + config = AugurConfig(logger, session) query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_id = execute_session_query(query, 'one').repo_id - num_clusters = session.config.get_value("Clustering_Task", 'num_clusters') - max_df = session.config.get_value("Clustering_Task", 'max_df') - max_features = session.config.get_value("Clustering_Task", 'max_features') - min_df = session.config.get_value("Clustering_Task", 'min_df') + num_clusters = config.get_value("Clustering_Task", 'num_clusters') + max_df = config.get_value("Clustering_Task", 'max_df') + max_features = config.get_value("Clustering_Task", 'max_features') + min_df = config.get_value("Clustering_Task", 'min_df') logger.info(f"Min df: {min_df}. Max df: {max_df}") @@ -165,7 +168,7 @@ def clustering_model(repo_git: str) -> None: 'tool_version': tool_version, 'data_source': data_source } - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: repo_cluster_messages_obj = RepoClusterMessage(**record) session.add(repo_cluster_messages_obj) session.commit() @@ -193,7 +196,7 @@ def clustering_model(repo_git: str) -> None: prediction = lda_model.transform(count_matrix_cur_repo) logger.debug('for loop for vocab') - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: for i, prob_vector in enumerate(prediction): # repo_id = msg_df.loc[i]['repo_id'] for i, prob in enumerate(prob_vector): @@ -367,7 +370,7 @@ def visualize_labels_PCA(features, labels, annotations, num_components, title): # twid = self.db.execute(key_sequence_words_sql) # logger.info("twid variable is: {}".format(twid)) # insert topic list into database - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: topic_id = 1 for topic in topic_list: # twid = self.get_max_id('topic_words', 'topic_words_id') + 1 diff --git a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py index 907356da79..183290edd4 100644 --- a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py +++ b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py @@ -3,7 +3,7 @@ import pandas as pd import sqlalchemy as s -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession from augur.tasks.github.util.github_paginator import GithubPaginator from augur.application.db.models import ContributorRepo @@ -101,7 +101,7 @@ def contributor_breadth_model() -> None: # source_cntrb_repos seemed like not exactly what 
the variable is for; its a list of actions for # each Github gh_login value already in our database - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: cntrb_events = [] for page_data, page in GithubPaginator(repo_cntrb_url, session.oauths, logger).iter_pages(): diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index 1a3acb0fc1..b11b711e2d 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -6,7 +6,7 @@ import nltk from collections import Counter -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, DiscourseInsight from augur.application.db.engine import DatabaseEngine @@ -40,7 +40,7 @@ def discourse_analysis_model(repo_git: str) -> None: tool_version = '0.1.0' data_source = 'Analysis of Issue/PR Messages' - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_id = execute_session_query(query, 'one').repo_id @@ -88,7 +88,7 @@ def discourse_analysis_model(repo_git: str) -> None: logger.debug(f"y_pred_git_flat len: {len(y_pred_git_flat)}") msg_df_cur_repo['discourse_act'] = y_pred_git_flat - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: for index, row in msg_df_cur_repo.iterrows(): record = { 'msg_id': row['msg_id'], diff --git a/augur/tasks/data_analysis/insight_worker/tasks.py b/augur/tasks/data_analysis/insight_worker/tasks.py index 60990f9701..16f8cfaeba 100644 --- a/augur/tasks/data_analysis/insight_worker/tasks.py +++ b/augur/tasks/data_analysis/insight_worker/tasks.py @@ -13,8 +13,9 @@ from sklearn.ensemble import IsolationForest import warnings -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession +from augur.application.config import AugurConfig from augur.application.db.models import Repo, ChaossMetricStatus, RepoInsight, RepoInsightsRecord from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query @@ -37,17 +38,19 @@ def insight_model(repo_git: str) -> None: metrics = {"issues-new": "issues", "code-changes": "commit_count", "code-changes-lines": "added", "reviews": "pull_requests", "contributors-new": "new_contributors"} - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: + + config = AugurConfig(logger, session) query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_id = execute_session_query(query, 'one').repo_id - anomaly_days = session.config.get_value('Insight_Task', 'anomaly_days') - training_days = session.config.get_value('Insight_Task', 'training_days') - contamination = session.config.get_value('Insight_Task', 'contamination') - confidence = session.config.get_value('Insight_Task', 'confidence_interval') / 100 - api_host = session.config.get_value('Server', 'host') - api_port = session.config.get_value('Server', 'port') + anomaly_days = config.get_value('Insight_Task', 'anomaly_days') + training_days = config.get_value('Insight_Task', 'training_days') + contamination = config.get_value('Insight_Task', 
'contamination') + confidence = config.get_value('Insight_Task', 'confidence_interval') / 100 + api_host = config.get_value('Server', 'host') + api_port = config.get_value('Server', 'port') logger.info("Discovering insights for repo {}\n".format(repo_git)) @@ -242,7 +245,7 @@ def classify_anomalies(df, metric): "data_source": data_source } - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: repo_insight_record_obj = RepoInsightsRecord(**record) session.add(repo_insight_record_obj) session.commit() @@ -287,7 +290,7 @@ def classify_anomalies(df, metric): "data_source": data_source } - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: repo_insight_obj = RepoInsight(**data_point) session.add(repo_insight_obj) session.commit() @@ -464,7 +467,7 @@ def is_unique_key(key): "data_source": data_source } - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: repo_insight_obj = RepoInsightsRecord(**record) session.add(repo_insight_obj) session.commit() @@ -495,7 +498,7 @@ def is_unique_key(key): "tool_version": tool_version, "data_source": data_source } - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: repo_insight_obj = RepoInsight(**data_point) session.add(repo_insight_obj) session.commit() @@ -727,7 +730,7 @@ def update_metrics(api_host, api_port, tool_source, tool_version, logger): "tool_version": tool_version, "data_source": metric['data_source'] } - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: cms_tuple = ChaossMetricStatus(**cms_tuple) session.add(cms_tuple) session.commit() diff --git a/augur/tasks/data_analysis/message_insights/tasks.py b/augur/tasks/data_analysis/message_insights/tasks.py index cfb7b90ce4..6a34944473 100644 --- a/augur/tasks/data_analysis/message_insights/tasks.py +++ b/augur/tasks/data_analysis/message_insights/tasks.py @@ -11,8 +11,9 @@ from augur.tasks.data_analysis.message_insights.message_novelty import novelty_analysis from augur.tasks.data_analysis.message_insights.message_sentiment import get_senti_score -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession +from augur.application.config import AugurConfig from augur.application.db.models import Repo, MessageAnalysis, MessageAnalysisSummary from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query @@ -36,13 +37,15 @@ def message_insight_model(repo_git: str) -> None: now = datetime.datetime.utcnow() run_id = int(now.timestamp())+5 - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: + + config = AugurConfig(logger, session) query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_id = execute_session_query(query, 'one').repo_id - models_dir = os.path.join(ROOT_AUGUR_DIRECTORY, "tasks", "data_analysis", "message_insights", session.config.get_value("Message_Insights", 'models_dir')) - insight_days = session.config.get_value("Message_Insights", 'insight_days') + models_dir = os.path.join(ROOT_AUGUR_DIRECTORY, "tasks", "data_analysis", "message_insights", config.get_value("Message_Insights", 'models_dir')) + insight_days = config.get_value("Message_Insights", 'insight_days') # Any initial database instructions, like finding the last tuple inserted or generate the next ID 
value @@ -186,7 +189,7 @@ def message_insight_model(repo_git: str) -> None: logger.info('Begin message_analysis data insertion...') logger.info(f'{df_message.shape[0]} data records to be inserted') - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: for row in df_message.itertuples(index=False): try: diff --git a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py index 9a78f896c0..d6cd816782 100644 --- a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py +++ b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py @@ -8,8 +8,9 @@ from augur.tasks.data_analysis.message_insights.message_sentiment import get_senti_score -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession +from augur.application.config import AugurConfig from augur.application.db.models import Repo, PullRequestAnalysis from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query @@ -31,12 +32,14 @@ def pull_request_analysis_model(repo_git: str) -> None: insight_days = 200 - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: + + config = AugurConfig(logger, session) query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_id = execute_session_query(query, 'one').repo_id - senti_models_dir = os.path.join(ROOT_AUGUR_DIRECTORY, "tasks", "data_analysis", "message_insights", session.config.get_value("Message_Insights", 'models_dir')) + senti_models_dir = os.path.join(ROOT_AUGUR_DIRECTORY, "tasks", "data_analysis", "message_insights", config.get_value("Message_Insights", 'models_dir')) logger.info(f'Sentiment model dir located - {senti_models_dir}') @@ -203,7 +206,7 @@ def pull_request_analysis_model(repo_git: str) -> None: logger.info('Begin PR_analysis data insertion...') logger.info(f'{df.shape[0]} data records to be inserted') - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: for row in df.itertuples(index=False): try: msg = { diff --git a/augur/tasks/db/refresh_materialized_views.py b/augur/tasks/db/refresh_materialized_views.py index c1a569f228..4c15434da2 100644 --- a/augur/tasks/db/refresh_materialized_views.py +++ b/augur/tasks/db/refresh_materialized_views.py @@ -4,7 +4,7 @@ from celery import signature from celery import group, chain, chord, signature -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.session import DatabaseSession @@ -25,6 +25,6 @@ def refresh_materialized_views(): REFRESH MATERIALIZED VIEW augur_data.explorer_libyear_summary with data; """) - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: session.execute_sql(refresh_view_query) \ No newline at end of file diff --git a/augur/tasks/git/dependency_tasks/core.py b/augur/tasks/git/dependency_tasks/core.py index 41e11b2409..0be1530663 100644 --- a/augur/tasks/git/dependency_tasks/core.py +++ b/augur/tasks/git/dependency_tasks/core.py @@ -2,10 +2,12 @@ import logging import requests import json +import traceback from augur.application.db.data_parse import * from augur.application.db.models import * from augur.application.db.session import DatabaseSession from augur.tasks.init.celery_app 
import engine +from augur.application.config import AugurConfig from augur.application.db.util import execute_session_query from augur.tasks.git.dependency_tasks.dependency_util import dependency_calculator as dep_calc @@ -56,7 +58,8 @@ def deps_model(session, repo_id): result = session.execute_sql(repo_path_sql) relative_repo_path = result.fetchone()[1] - absolute_repo_path = session.config.get_section("Facade")['repo_directory'] + relative_repo_path + config = AugurConfig(session.logger, session) + absolute_repo_path = config.get_section("Facade")['repo_directory'] + relative_repo_path try: generate_deps_data(session,repo_id, absolute_repo_path) diff --git a/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py b/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py index 58f83ba7ac..a1319f6c04 100644 --- a/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py +++ b/augur/tasks/git/dependency_tasks/dependency_util/python_deps.py @@ -1,6 +1,7 @@ import sys import re from pathlib import Path +import codecs def get_files(path): #copied from example on https://docs.python.org/3/library/pathlib.html @@ -10,7 +11,8 @@ def get_files(path): return files def get_deps_for_file(path): - f = open(path, 'r') + f = open(path, 'r',encoding="utf-8") + matches = re.findall("import\s*(\w*)", f.read()) f.close() return matches diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 64283d3842..d9daf4571b 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -29,7 +29,7 @@ from augur.tasks.util.worker_util import create_grouped_task_load -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db import data_parse @@ -75,7 +75,7 @@ def grab_comitters(repo_id_list,platform="github"): for repo_id in repo_id_list: try: - grab_committer_list(GithubTaskSession(logger), repo_id,platform) + grab_committer_list(GithubTaskSession(logger, engine), repo_id,platform) except Exception as e: logger.error(f"Could not grab committers from github endpoint!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") @@ -188,18 +188,6 @@ def update_analysis_log(repos_id,status): session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") - if len(missing_commits) > 0: - #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) - - #encode the repo_id with the commit. - commits_with_repo_tuple = [(commit,repo_id) for commit in list(missing_commits)] - - #1/21/2023: SPG things list needs to be initialized based on error - all_missing_commits = [] - - #Get all missing commits into one large list to split into task pools - all_missing_commits.extend(commits_with_repo_tuple) - # Find commits which are out of the analysis range trimmed_commits = existing_commits - parent_commits @@ -208,7 +196,6 @@ def update_analysis_log(repos_id,status): update_analysis_log(repo_id,'Beginning to trim commits') - session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") @@ -250,12 +237,13 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: #create new session for celery thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) + # TODO: Is this session ever closed? 
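The `TODO` above is worth making explicit: `FacadeSession(logger)` is constructed directly here, so nothing guarantees the session is closed when the task ends. The new `*_facade_task` wrappers added later in this patch use the session as a context manager instead; a minimal sketch of that shape for this spot (illustrative only, reusing the local names of the surrounding function):

```python
# Sketch: open the facade session as a context manager so it is closed even if
# the per-repo analysis loop raises; mirrors the `with FacadeSession(logger) as
# session:` usage in the new facade tasks added further down in this patch.
with FacadeSession(logger) as session:
    start_date = session.get_setting('start_date')
    # ... the per-repo analysis loop would run inside this block
```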
session = FacadeSession(logger) start_date = session.get_setting('start_date') for repo_id in repo_ids: session.logger.info(f"Generating sequence for repo {repo_id}") - + query = session.query(Repo).filter(Repo.repo_id == repo_id) repo = execute_session_query(query, 'one') @@ -310,16 +298,22 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: logger.info(f"Got to analysis!") for count, commitTuple in enumerate(queue): + quarterQueue = int(len(queue) / 4) + + if quarterQueue == 0: + quarterQueue = 1 # prevent division by zero with integer math #Log progress when another quarter of the queue has been processed - #Checking for Modulo of Zero first. - if int(len(queue)/4)!=0: - if (count + 1) % int(len(queue) / 4) == 0: - logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") + if (count + 1) % quarterQueue == 0: + logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") query = session.query(Repo).filter(Repo.repo_id == repo_id) repo = execute_session_query(query,'one') + logger.info(f"Got to analysis!") + + for count, commitTuple in enumerate(queue): + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") analyze_commit(session, repo_id, repo_loc, commitTuple) @@ -330,6 +324,7 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: @celery.task def nuke_affiliations_facade_task(): logger = logging.getLogger(nuke_affiliations_facade_task.__name__) + # TODO: Is this session ever closed? session = FacadeSession(logger) nuke_affiliations(session) @@ -354,6 +349,47 @@ def rebuild_unknown_affiliation_and_web_caches_facade_task(): with FacadeSession(logger) as session: rebuild_unknown_affiliation_and_web_caches(session) +@celery.task +def force_repo_analysis_facade_task(repo_git_identifiers): + logger = logging.getLogger(force_repo_analysis_facade_task.__name__) + + with FacadeSession(logger) as session: + force_repo_analysis(session, repo_git_identifiers) + +@celery.task +def git_repo_cleanup_facade_task(repo_git_identifiers): + logger = logging.getLogger(git_repo_cleanup_facade_task.__name__) + + with FacadeSession(logger) as session: + git_repo_cleanup(session, repo_git_identifiers) + +@celery.task +def git_repo_initialize_facade_task(repo_git_identifiers): + logger = logging.getLogger(git_repo_initialize_facade_task.__name__) + + with FacadeSession(logger) as session: + git_repo_initialize(session, repo_git_identifiers) + +@celery.task +def check_for_repo_updates_facade_task(repo_git_identifiers): + logger = logging.getLogger(check_for_repo_updates_facade_task.__name__) + + with FacadeSession(logger) as session: + check_for_repo_updates(session, repo_git_identifiers) + +@celery.task +def force_repo_updates_facade_task(repo_git_identifiers): + logger = logging.getLogger(force_repo_updates_facade_task.__name__) + + with FacadeSession(logger) as session: + force_repo_updates(session, repo_git_identifiers) + +@celery.task +def git_repo_updates_facade_task(repo_git_identifiers): + logger = logging.getLogger(git_repo_updates_facade_task.__name__) + + with FacadeSession(logger) as session: + git_repo_updates(session, repo_git_identifiers) def generate_analysis_sequence(logger,repo_git_identifiers): @@ -455,22 +491,22 @@ def generate_facade_chain(logger,repo_git_identifiers): facade_sequence = [] if not limited_run or (limited_run and delete_marked_repos): - git_repo_cleanup(session,repo_git_identifiers) + 
facade_sequence.append(git_repo_cleanup_facade_task.si(repo_git_identifiers))#git_repo_cleanup(session,repo_git_identifiers) if not limited_run or (limited_run and clone_repos): - git_repo_initialize(session,repo_git_identifiers) + facade_sequence.append(git_repo_initialize_facade_task.si(repo_git_identifiers))#git_repo_initialize(session,repo_git_identifiers) if not limited_run or (limited_run and check_updates): - check_for_repo_updates(session,repo_git_identifiers) + facade_sequence.append(check_for_repo_updates_facade_task.si(repo_git_identifiers))#check_for_repo_updates(session,repo_git_identifiers) if force_updates: - force_repo_updates(session,repo_git_identifiers)#facade_sequence.append(force_repo_updates_facade_task.si()) + facade_sequence.append(force_repo_updates_facade_task.si(repo_git_identifiers)) if not limited_run or (limited_run and pull_repos): - git_repo_updates(session,repo_git_identifiers)#facade_sequence.append(git_repo_updates_facade_task.si()) + facade_sequence.append(git_repo_updates_facade_task.si(repo_git_identifiers)) if force_analysis: - force_repo_analysis(session,repo_git_identifiers)#facade_sequence.append(force_repo_analysis_facade_task.si()) + facade_sequence.append(force_repo_analysis_facade_task.si(repo_git_identifiers)) #Generate commit analysis task order. facade_sequence.extend(generate_analysis_sequence(logger,repo_git_identifiers)) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py index 5ff6a44077..f0f16b3d38 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py @@ -45,6 +45,7 @@ from augur.tasks.github.util.github_task_session import * from augur.application.logs import AugurLogger +from augur.application.config import AugurConfig from logging import Logger logger = logging.getLogger(__name__) @@ -52,7 +53,12 @@ def get_database_args_from_env(): db_str = os.getenv("AUGUR_DB") - db_json_file_location = os.getcwd() + "/db.config.json" + try: + db_json_file_location = os.getcwd() + "/db.config.json" + except FileNotFoundError: + logger.error("\n\nPlease run augur commands in the root directory\n\n") + sys.exit() + db_json_exists = os.path.exists(db_json_file_location) if not db_str and not db_json_exists: @@ -109,7 +115,8 @@ def __init__(self,logger: Logger): self.repos_processed = 0 super().__init__(logger=logger) # Figure out what we need to do - worker_options = self.config.get_section("Facade") + + worker_options = AugurConfig(logger, self).get_section("Facade") self.limited_run = worker_options["limited_run"] self.delete_marked_repos = worker_options["delete_marked_repos"] @@ -284,7 +291,7 @@ def __init__(self, logger: Logger): #worker_options = read_config("Workers", "facade_worker", None, None) with DatabaseSession(logger) as session: - config = session.config + config = AugurConfig(logger, session) worker_options = config.get_section("Facade") if 'repo_directory' in worker_options: diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py index dfc5cee9e3..69c268a001 100644 --- a/augur/tasks/github/detect_move/tasks.py +++ b/augur/tasks/github/detect_move/tasks.py @@ -1,6 +1,6 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.detect_move.core import * -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app 
as celery, engine from augur.application.db.util import execute_session_query @@ -10,7 +10,7 @@ def detect_github_repo_move(repo_git_identifiers : [str]) -> None: logger = logging.getLogger(detect_github_repo_move.__name__) logger.info(f"Starting repo_move operation with {repo_git_identifiers}") - with GithubTaskSession(logger) as session: + with GithubTaskSession(logger, engine) as session: #Ping each repo with the given repo_git to make sure #that they are still in place. for repo_git in repo_git_identifiers: diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 0b1242599e..17e1efd917 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -6,6 +6,7 @@ from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.github.util.util import get_owner_repo from augur.tasks.util.worker_util import remove_duplicate_dicts from augur.application.db.models import PullRequest, Message, PullRequestReview, PullRequestLabel, PullRequestReviewer, PullRequestEvent, PullRequestMeta, PullRequestAssignee, PullRequestReviewMessageRef, Issue, IssueEvent, IssueLabel, IssueAssignee, PullRequestMessageRef, IssueMessageRef, Contributor, Repo @@ -19,13 +20,12 @@ def collect_events(repo_git_identifiers: [str]): logger = logging.getLogger(collect_events.__name__) + with DatabaseSession(logger, engine) as session: - for repo_git in repo_git_identifiers: + for repo_git in repo_git_identifiers: - try: - # define GithubTaskSession to handle insertions, and store oauth keys - with GithubTaskSession(logger) as session: - + try: + query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_obj = execute_session_query(query, 'one') repo_id = repo_obj.repo_id @@ -36,16 +36,16 @@ def collect_events(repo_git_identifiers: [str]): url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" - event_data = retrieve_all_event_data(repo_git, logger) + event_data = retrieve_all_event_data(repo_git, logger) - if event_data: - - process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) + if event_data: + + process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no events") - except Exception as e: - logger.error(f"Could not collect events for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + else: + logger.info(f"{owner}/{repo} has no events") + except Exception as e: + logger.error(f"Could not collect events for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_event_data(repo_git: str, logger): @@ -56,7 +56,6 @@ def retrieve_all_event_data(repo_git: str, logger): url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" - # define GithubTaskSession to handle insertions, and store oauth keys with GithubTaskSession(logger, engine) as session: # returns an iterable of all issues at this url (this essentially means you can treat the issues variable as a list of the issues) @@ -91,7 +90,7 @@ def process_events(events, task_name, repo_id, logger): issue_event_dicts = [] contributors = [] - with GithubTaskSession(logger, engine) as session: + with DatabaseSession(logger, engine) as session: not_mapable_event_count = 0 event_len = 
len(events) diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 054ee91564..74c2aa139c 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -227,9 +227,8 @@ def link_commits_to_contributor(session,contributorQueue): @celery.task def insert_facade_contributors(repo_id_list): logger = logging.getLogger(insert_facade_contributors.__name__) - #session = GithubTaskSession(logger) - with GithubTaskSession(logger) as session: + with GithubTaskSession(logger, engine) as session: # Get all of the commit data's emails and names from the commit table that do not appear @@ -339,4 +338,3 @@ def insert_facade_contributors(repo_id_list): session.logger.info("Done with inserting and updating facade contributors") return - diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index f33459795e..416ddfc22d 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -10,6 +10,7 @@ from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.github.util.util import add_key_value_pair_to_dicts, get_owner_repo from augur.tasks.util.worker_util import remove_duplicate_dicts from augur.application.db.models import PullRequest, Message, PullRequestReview, PullRequestLabel, PullRequestReviewer, PullRequestEvent, PullRequestMeta, PullRequestAssignee, PullRequestReviewMessageRef, Issue, IssueEvent, IssueLabel, IssueAssignee, PullRequestMessageRef, IssueMessageRef, Contributor, Repo @@ -22,28 +23,27 @@ def collect_issues(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_issues.__name__) - for repo_git in repo_git_identifiers: - try: - owner, repo = get_owner_repo(repo_git) - - # define GithubTaskSession to handle insertions, and store oauth keys - with GithubTaskSession(logger) as session: + with DatabaseSession(logger, engine) as session: + for repo_git in repo_git_identifiers: + try: + query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_obj = execute_session_query(query, 'one') repo_id = repo_obj.repo_id - - issue_data = retrieve_all_issue_data(repo_git, logger) - - if issue_data: + owner, repo = get_owner_repo(repo_git) - process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) + issue_data = retrieve_all_issue_data(repo_git, logger) - else: - logger.info(f"{owner}/{repo} has no issues") - except Exception as e: - logger.error(f"Could not collect issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + if issue_data: + + process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no issues") + except Exception as e: + logger.error(f"Could not collect issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_issue_data(repo_git, logger) -> None: @@ -54,7 +54,6 @@ def retrieve_all_issue_data(repo_git, logger) -> None: url = f"https://api.github.com/repos/{owner}/{repo}/issues?state=all" - # define GithubTaskSession to handle insertions, and store oauth keys with GithubTaskSession(logger, engine) as session: @@ -134,7 +133,7 @@ def process_issues(issues, task_name, repo_id, logger) -> 
None: print("No issues found while processing") return - with GithubTaskSession(logger, engine) as session: + with DatabaseSession(logger, engine) as session: # remove duplicate contributors before inserting contributors = remove_duplicate_dicts(contributors) diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index f676d28c60..537d273984 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -6,6 +6,7 @@ from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.util.worker_util import remove_duplicate_dicts from augur.tasks.github.util.util import get_owner_repo from augur.application.db.models import PullRequest, Message, PullRequestReview, PullRequestLabel, PullRequestReviewer, PullRequestEvent, PullRequestMeta, PullRequestAssignee, PullRequestReviewMessageRef, Issue, IssueEvent, IssueLabel, IssueAssignee, PullRequestMessageRef, IssueMessageRef, Contributor, Repo @@ -20,25 +21,26 @@ def collect_github_messages(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_github_messages.__name__) + + with DatabaseSession(logger, engine) as session: - for repo_git in repo_git_identifiers: - try: - with GithubTaskSession(logger, engine) as session: - + for repo_git in repo_git_identifiers: + try: + repo_id = session.query(Repo).filter( Repo.repo_git == repo_git).one().repo_id - owner, repo = get_owner_repo(repo_git) - message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) + owner, repo = get_owner_repo(repo_git) + message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) - if message_data: - - process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) + if message_data: + + process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no messages") - except Exception as e: - logger.error(f"Could not collect github messages for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + else: + logger.info(f"{owner}/{repo} has no messages") + except Exception as e: + logger.error(f"Could not collect github messages for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: @@ -98,7 +100,7 @@ def process_messages(messages, task_name, repo_id, logger): if len(messages) == 0: logger.info(f"{task_name}: No messages to process") - with GithubTaskSession(logger, engine) as session: + with DatabaseSession(logger, engine) as session: for message in messages: diff --git a/augur/tasks/github/pull_requests/commits_model/core.py b/augur/tasks/github/pull_requests/commits_model/core.py index 6ecd770f35..cc9b277889 100644 --- a/augur/tasks/github/pull_requests/commits_model/core.py +++ b/augur/tasks/github/pull_requests/commits_model/core.py @@ -22,7 +22,9 @@ def pull_request_commits_model(repo_id,logger): """).bindparams(repo_id=repo_id) pr_urls = [] #pd.read_sql(pr_number_sql, self.db, params={}) - session = GithubTaskSession(logger) + + # TODO: Is this session ever closed? 
+ session = GithubTaskSession(logger, engine) pr_urls = session.fetchall_data_from_sql_text(pr_url_sql)#session.execute_sql(pr_number_sql).fetchall() query = session.query(Repo).filter(Repo.repo_id == repo_id) diff --git a/augur/tasks/github/pull_requests/commits_model/tasks.py b/augur/tasks/github/pull_requests/commits_model/tasks.py index e50ea9b4ea..93e3eaba99 100644 --- a/augur/tasks/github/pull_requests/commits_model/tasks.py +++ b/augur/tasks/github/pull_requests/commits_model/tasks.py @@ -1,8 +1,8 @@ import logging import traceback -from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.github.pull_requests.commits_model.core import * -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.util import execute_session_query @@ -10,12 +10,13 @@ def process_pull_request_commits(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(process_pull_request_commits.__name__) - for repo_git in repo_git_identifiers: - with GithubTaskSession(logger) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - pull_request_commits_model(repo.repo_id, logger) - except Exception as e: - logger.error(f"Could not complete pull_request_commits_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - raise e + with DatabaseSession(logger, engine) as session: + + for repo_git in repo_git_identifiers: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + pull_request_commits_model(repo.repo_id, logger) + except Exception as e: + logger.error(f"Could not complete pull_request_commits_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + raise e diff --git a/augur/tasks/github/pull_requests/core.py b/augur/tasks/github/pull_requests/core.py index 3aa1b6ffe8..9cd8008993 100644 --- a/augur/tasks/github/pull_requests/core.py +++ b/augur/tasks/github/pull_requests/core.py @@ -224,7 +224,7 @@ def insert_pr_labels(labels: List[dict], logger: logging.Logger) -> None: labels: list of labels to insert logger: handles logging """ - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: # we are using pr_src_id and pull_request_id to determine if the label is already in the database. pr_label_natural_keys = ['pr_src_id', 'pull_request_id'] @@ -241,7 +241,7 @@ def insert_pr_assignees(assignees: List[dict], logger: logging.Logger) -> None: assignees: list of assignees to insert logger: handles logging """ - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: # we are using pr_assignee_src_id and pull_request_id to determine if the label is already in the database. pr_assignee_natural_keys = ['pr_assignee_src_id', 'pull_request_id'] @@ -258,7 +258,7 @@ def insert_pr_reviewers(reviewers: List[dict], logger: logging.Logger) -> None: reviewers: list of reviewers to insert logger: handles logging """ - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: # we are using pr_src_id and pull_request_id to determine if the label is already in the database. 
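As background for the natural-key lists used by these insert helpers: each list names the columns whose combined value identifies a row, and `insert_data` uses them so that re-running collection updates or skips existing rows instead of duplicating them. A rough, hedged illustration of the kind of statement that corresponds to (the real statement is built inside `DatabaseSession.insert_data`, shown earlier in this patch series):

```python
from sqlalchemy.dialects import postgresql

from augur.application.db.models import PullRequestLabel

# Hypothetical rows, trimmed to the key columns for illustration.
labels = [{"pr_src_id": 1, "pull_request_id": 10}]

# Roughly what a natural-key insert boils down to: ON CONFLICT over the key
# columns, so a second collection pass does not insert duplicate label rows.
stmt = (
    postgresql.insert(PullRequestLabel.__table__)
    .values(labels)
    .on_conflict_do_nothing(index_elements=["pr_src_id", "pull_request_id"])
)
```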
pr_reviewer_natural_keys = ["pull_request_id", "pr_reviewer_src_id"] @@ -275,7 +275,7 @@ def insert_pr_metadata(metadata: List[dict], logger: logging.Logger) -> None: metadata: list of metadata to insert logger: handles logging """ - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: # inserting pr metadata # we are using pull_request_id, pr_head_or_base, and pr_sha to determine if the label is already in the database. diff --git a/augur/tasks/github/pull_requests/files_model/core.py b/augur/tasks/github/pull_requests/files_model/core.py index 8163aeba0c..e8c1cdb3f9 100644 --- a/augur/tasks/github/pull_requests/files_model/core.py +++ b/augur/tasks/github/pull_requests/files_model/core.py @@ -22,7 +22,9 @@ def pull_request_files_model(repo_id,logger): """).bindparams(repo_id=repo_id) pr_numbers = [] #pd.read_sql(pr_number_sql, self.db, params={}) - session = GithubTaskSession(logger) + + # TODO: Is this session ever closed? + session = GithubTaskSession(logger, engine) result = session.execute_sql(pr_number_sql).fetchall() pr_numbers = [dict(zip(row.keys(), row)) for row in result] diff --git a/augur/tasks/github/pull_requests/files_model/tasks.py b/augur/tasks/github/pull_requests/files_model/tasks.py index fbe29795ac..813f71116c 100644 --- a/augur/tasks/github/pull_requests/files_model/tasks.py +++ b/augur/tasks/github/pull_requests/files_model/tasks.py @@ -1,20 +1,21 @@ import logging import traceback -from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.github.pull_requests.files_model.core import * -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.util import execute_session_query @celery.task def process_pull_request_files(repo_git_identifiers: str) -> None: logger = logging.getLogger(process_pull_request_files.__name__) - for repo_git in repo_git_identifiers: - with GithubTaskSession(logger) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - pull_request_files_model(repo.repo_id, logger) - except Exception as e: - logger.error(f"Could not complete pull_request_files_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - #raise e \ No newline at end of file + with DatabaseSession(logger, engine) as session: + + for repo_git in repo_git_identifiers: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + pull_request_files_model(repo.repo_id, logger) + except Exception as e: + logger.error(f"Could not complete pull_request_files_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + #raise e diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 961a2ad4cd..9ec691595b 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -7,6 +7,7 @@ from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.util.worker_util import remove_duplicate_dicts from augur.tasks.github.util.util import 
add_key_value_pair_to_dicts, get_owner_repo from augur.application.db.models import PullRequest, Message, PullRequestReview, PullRequestLabel, PullRequestReviewer, PullRequestEvent, PullRequestMeta, PullRequestAssignee, PullRequestReviewMessageRef, PullRequestMessageRef, Contributor, Repo @@ -20,25 +21,25 @@ def collect_pull_requests(repo_git_identifiers: [str]) -> None: logger = logging.getLogger(collect_pull_requests.__name__) - - for repo_git in repo_git_identifiers: - try: - with GithubTaskSession(logger, engine) as session: - + with DatabaseSession(logger, engine) as session: + + for repo_git in repo_git_identifiers: + try: + repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id + Repo.repo_git == repo_git).one().repo_id - owner, repo = get_owner_repo(repo_git) - pr_data = retrieve_all_pr_data(repo_git, logger) + owner, repo = get_owner_repo(repo_git) + pr_data = retrieve_all_pr_data(repo_git, logger) - if pr_data: - process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no pull requests") - except Exception as e: - logger.error(f"Could not collect pull requests for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - + if pr_data: + process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) + else: + logger.info(f"{owner}/{repo} has no pull requests") + except Exception as e: + logger.error(f"Could not collect pull requests for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + # TODO: Rename pull_request_reviewers table to pull_request_requested_reviewers # TODO: Fix column names in pull request labels table @@ -85,7 +86,7 @@ def process_pull_requests(pull_requests, task_name, repo_id, logger): pr_dicts, pr_mapping_data, pr_numbers, contributors = extract_data_from_pr_list(pull_requests, repo_id, tool_source, tool_version, data_source) - with GithubTaskSession(logger, engine) as session: + with DatabaseSession(logger, engine) as session: # remove duplicate contributors before inserting contributors = remove_duplicate_dicts(contributors) diff --git a/augur/tasks/github/repo_info/tasks.py b/augur/tasks/github/repo_info/tasks.py index c739cb49d0..37287542c7 100644 --- a/augur/tasks/github/repo_info/tasks.py +++ b/augur/tasks/github/repo_info/tasks.py @@ -1,4 +1,5 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.application.db.session import DatabaseSession from augur.tasks.github.repo_info.core import * from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.util import execute_session_query @@ -19,4 +20,4 @@ def collect_repo_info(repo_git_identifiers: [str]): except Exception as e: session.logger.error(f"Could not add repo info for repo {repo.repo_id}\n Error: {e}") session.logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) \ No newline at end of file + ''.join(traceback.format_exception(None, e, e.__traceback__))) diff --git a/augur/tasks/github/util/github_api_key_handler.py b/augur/tasks/github/util/github_api_key_handler.py index 6bba5764cf..86b6d5202c 100644 --- a/augur/tasks/github/util/github_api_key_handler.py +++ b/augur/tasks/github/util/github_api_key_handler.py @@ -3,9 +3,9 @@ from typing import Optional, List -from augur.application.db.models import WorkerOauth from augur.tasks.util.redis_list import RedisList from augur.application.db.session import 
DatabaseSession +from augur.application.config import AugurConfig from augur.tasks.init.celery_app import engine class GithubApiKeyHandler(): @@ -24,6 +24,7 @@ def __init__(self, session: DatabaseSession): self.session = session self.logger = session.logger + self.config = AugurConfig(self.logger, session) self.oauth_redis_key = "oauth_keys_list" @@ -42,7 +43,7 @@ def get_config_key(self) -> str: Github API key from config table """ - return self.session.config.get_value("Keys", "github_api_key") + return self.config.get_value("Keys", "github_api_key") def get_api_keys_from_database(self) -> List[str]: """Retieves all github api keys from database @@ -53,6 +54,8 @@ def get_api_keys_from_database(self) -> List[str]: Returns: Github api keys that are in the database """ + from augur.application.db.models import WorkerOauth + select = WorkerOauth.access_token where = [WorkerOauth.access_token != self.config_key, WorkerOauth.platform == 'github'] diff --git a/augur/tasks/github/util/github_paginator.py b/augur/tasks/github/util/github_paginator.py index 2ad9d3ad46..6734c829bf 100644 --- a/augur/tasks/github/util/github_paginator.py +++ b/augur/tasks/github/util/github_paginator.py @@ -65,17 +65,20 @@ def process_dict_response(logger: logging.Logger, response: httpx.Response, page """ #logger.info("Request returned a dict: {}\n".format(page_data)) - if 'message' not in page_data.keys(): + message = page_data.get('message') + errors = page_data.get('errors') + + if not message and not errors: return GithubApiResult.SUCCESS - if page_data['message'] == "Not Found": + if message == "Not Found": logger.error( "Github repo was not found or does not exist for endpoint: " f"{response.url}" ) return GithubApiResult.REPO_NOT_FOUND - if "You have exceeded a secondary rate limit. Please wait a few minutes before you try again" in page_data['message']: + if message and "You have exceeded a secondary rate limit. Please wait a few minutes before you try again" in message: # sleeps for the specified amount of time that github says to retry after retry_after = int(response.headers["Retry-After"]) @@ -86,7 +89,7 @@ def process_dict_response(logger: logging.Logger, response: httpx.Response, page return GithubApiResult.SECONDARY_RATE_LIMIT # return "do_not_increase_attempts" - if "API rate limit exceeded for user" in page_data['message']: + if message and "API rate limit exceeded for user" in message: current_epoch = int(time.time()) epoch_when_key_resets = int(response.headers["X-RateLimit-Reset"]) @@ -101,7 +104,7 @@ def process_dict_response(logger: logging.Logger, response: httpx.Response, page return GithubApiResult.RATE_LIMIT_EXCEEDED - if "You have triggered an abuse detection mechanism." in page_data['message']: + if message and "You have triggered an abuse detection mechanism." 
in message: # self.update_rate_limit(response, temporarily_disable=True,platform=platform) @@ -112,11 +115,16 @@ def process_dict_response(logger: logging.Logger, response: httpx.Response, page return GithubApiResult.ABUSE_MECHANISM_TRIGGERED - if page_data['message'] == "Bad credentials": + if message == "Bad credentials": logger.error("\n\n\n\n\n\n\n Bad Token Detected \n\n\n\n\n\n\n") # self.update_rate_limit(response, bad_credentials=True, platform=platform) return GithubApiResult.BAD_CREDENTIALS + if errors: + for error in errors: + if "API rate limit exceeded for user" in error['message']: + return GithubApiResult.RATE_LIMIT_EXCEEDED + return GithubApiResult.NEW_RESULT class GithubApiResult(Enum): diff --git a/augur/tasks/github/util/github_task_session.py b/augur/tasks/github/util/github_task_session.py index 331b65d2e6..a9972219be 100644 --- a/augur/tasks/github/util/github_task_session.py +++ b/augur/tasks/github/util/github_task_session.py @@ -1,9 +1,7 @@ from logging import Logger -from augur.tasks.github.util.github_api_key_handler import GithubApiKeyHandler from augur.tasks.github.util.github_random_key_auth import GithubRandomKeyAuth from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine class GithubTaskSession(DatabaseSession): diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index d1bec5c68a..166b4303e5 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -11,6 +11,7 @@ from augur.application.logs import TaskLogConfig from augur.application.db.session import DatabaseSession +from augur.application.config import AugurConfig from augur.application.db.engine import get_database_string from augur.tasks.init import get_redis_conn_values, get_rabbitmq_conn_string @@ -118,7 +119,9 @@ def setup_periodic_tasks(sender, **kwargs): with DatabaseSession(logger) as session: - collection_interval = session.config.get_value('Tasks', 'collection_interval') + config = AugurConfig(logger, session) + + collection_interval = config.get_value('Tasks', 'collection_interval') logger.info(f"Scheduling collection every {collection_interval/60/60} hours") sender.add_periodic_task(collection_interval, start_task.s()) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index b5f9ccc667..2dae9d8f8c 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -24,9 +24,10 @@ from augur.tasks.git.facade_tasks import * from augur.tasks.db.refresh_materialized_views import * # from augur.tasks.data_analysis import * -from augur.tasks.init.celery_app import celery_app as celery +from augur.tasks.init.celery_app import celery_app as celery, engine from celery.result import allow_join_result from augur.application.logs import AugurLogger +from augur.application.config import AugurConfig from augur.application.db.session import DatabaseSession from augur.tasks.init.celery_app import engine from augur.application.db.util import execute_session_query @@ -70,8 +71,8 @@ def repo_collect_phase(): all_repo_git_identifiers = [repo.repo_git for repo in repos] - #Cluster each repo in groups of 5. - np_clustered_array = np.array_split(all_repo_git_identifiers,math.ceil(len(all_repo_git_identifiers)/50)) + #Cluster each repo in groups of 80. 
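To make the batching described in the comment above concrete (hypothetical numbers; the actual split follows on the next line): `np.array_split` with `math.ceil(len(...) / 80)` produces groups of at most 80 repos each.

```python
import math

import numpy as np

# Hypothetical list of 200 repo URLs batched with the same arithmetic as above.
repos = [f"https://github.com/example/repo{i}" for i in range(200)]
batches = np.array_split(repos, math.ceil(len(repos) / 80))

print([len(batch) for batch in batches])  # [67, 67, 66] -> three near-equal groups
```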
+ np_clustered_array = np.array_split(all_repo_git_identifiers,math.ceil(len(all_repo_git_identifiers)/80)) first_pass = np_clustered_array.pop(0).tolist() @@ -96,8 +97,7 @@ def repo_collect_phase(): repo_task_group = group( *repo_info_tasks, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), - generate_facade_chain(logger,first_pass), - *create_grouped_task_load(dataList=first_pass,task=process_dependency_metrics).tasks, + chain(generate_facade_chain(logger,first_pass),create_grouped_task_load(dataList=first_pass,task=process_dependency_metrics)), collect_releases.si() ) @@ -208,8 +208,8 @@ def start_task(): logger = logging.getLogger(start_task.__name__) #Get phase options from the config - with DatabaseSession(logger) as session: - config = session.config + with DatabaseSession(logger, engine) as session: + config = AugurConfig(logger, session) phase_options = config.get_section("Task_Routine") #Get list of enabled phases diff --git a/augur/tasks/util/worker_util.py b/augur/tasks/util/worker_util.py index 427017143a..b0cb335d94 100644 --- a/augur/tasks/util/worker_util.py +++ b/augur/tasks/util/worker_util.py @@ -4,7 +4,6 @@ #import gunicorn.app.base import numpy as np from celery import group -from augur.application.logs import AugurLogger from celery.result import AsyncResult from celery.result import allow_join_result diff --git a/augur/util/repo_load_controller.py b/augur/util/repo_load_controller.py index 60f9f46d7c..7ce9fa7409 100644 --- a/augur/util/repo_load_controller.py +++ b/augur/util/repo_load_controller.py @@ -7,12 +7,9 @@ from typing import List, Any, Dict -from augur.tasks.github.util.github_paginator import hit_api -from augur.tasks.github.util.github_paginator import GithubPaginator -from augur.tasks.github.util.github_task_session import GithubTaskSession -from augur.application.db.session import DatabaseSession from augur.application.db.engine import DatabaseEngine from augur.application.db.models import Repo, UserRepo, RepoGroup, UserGroup, User +from augur.application.db.models.augur_operations import retrieve_org_repos from augur.application.db.util import execute_session_query @@ -76,370 +73,6 @@ def __init__(self, gh_session): self.session = gh_session - def is_valid_repo(self, url: str) -> bool: - """Determine whether repo url is valid. - - Args: - url: repo_url - - Returns - True if repo url is valid and False if not - """ - - if not self.session.oauths.list_of_keys: - return False, {"status": "No valid github api keys to retrieve data with"} - - owner, repo = parse_repo_url(url) - if not owner or not repo: - return False, {"status":"Invalid repo url"} - - url = REPO_ENDPOINT.format(owner, repo) - - attempts = 0 - while attempts < 10: - result = hit_api(self.session.oauths, url, logger) - - # if result is None try again - if not result: - attempts+=1 - continue - - # if there was an error return False - if "message" in result.json().keys(): - return False, {"status": f"Github Error: {result.json()['message']}"} - - return True, {"status": "Valid repo"} - - - def retrieve_org_repos(self, url: str) -> List[str]: - """Get the repos for an org. 
- - Note: - If the org url is not valid it will return [] - - Args: - url: org url - - Returns - List of valid repo urls or empty list if invalid org - """ - - owner = parse_org_url(url) - if not owner: - return None, {"status": "Invalid owner url"} - - url = ORG_REPOS_ENDPOINT.format(owner) - - repos = [] - with GithubTaskSession(logger) as session: - - if not session.oauths.list_of_keys: - return None, {"status": "No valid github api keys to retrieve data with"} - - for page_data, page in GithubPaginator(url, session.oauths, logger).iter_pages(): - - if page_data is None: - break - - repos.extend(page_data) - - repo_urls = [repo["html_url"] for repo in repos] - - return repo_urls, {"status": "Invalid owner url"} - - - def is_valid_repo_group_id(self, repo_group_id: int) -> bool: - """Deterime is repo_group_id exists. - - Args: - repo_group_id: id from the repo groups table - - Returns: - True if it exists, False if it does not - """ - - query = self.session.query(RepoGroup).filter(RepoGroup.repo_group_id == repo_group_id) - - try: - result = execute_session_query(query, 'one') - except (s.orm.exc.NoResultFound, s.orm.exc.MultipleResultsFound): - return False - - return True - - def add_repo_row(self, url: str, repo_group_id: int, tool_source): - """Add a repo to the repo table. - - Args: - url: repo url - repo_group_id: group to assign repo to - - Note: - If repo row exists then it will update the repo_group_id if param repo_group_id is not a default. If it does not exist is will simply insert the repo. - """ - - if not isinstance(url, str) or not isinstance(repo_group_id, int) or not isinstance(tool_source, str): - return None - - if not self.is_valid_repo_group_id(repo_group_id): - return None - - repo_data = { - "repo_group_id": repo_group_id, - "repo_git": url, - "repo_status": "New", - "tool_source": tool_source, - "tool_version": "1.0", - "data_source": "Git" - } - - repo_unique = ["repo_git"] - return_columns = ["repo_id"] - result = self.session.insert_data(repo_data, Repo, repo_unique, return_columns, on_conflict_update=False) - - if not result: - return None - - if repo_group_id not in DEFAULT_REPO_GROUP_IDS: - # update the repo group id - query = self.session.query(Repo).filter(Repo.repo_git == url) - repo = execute_session_query(query, 'one') - - if not repo.repo_group_id == repo_group_id: - repo.repo_group_id = repo_group_id - self.session.commit() - - return result[0]["repo_id"] - - - def add_repo_to_user_group(self, repo_id: int, group_id:int = 1) -> bool: - """Add a repo to a user in the user_repos table. - - Args: - repo_id: id of repo from repo table - user_id: id of user_id from users table - """ - - if not isinstance(repo_id, int) or not isinstance(group_id, int): - return False - - repo_user_group_data = { - "group_id": group_id, - "repo_id": repo_id - } - - - repo_user_group_unique = ["group_id", "repo_id"] - return_columns = ["group_id", "repo_id"] - - try: - data = self.session.insert_data(repo_user_group_data, UserRepo, repo_user_group_unique, return_columns) - except s.exc.IntegrityError: - return False - - return data[0]["group_id"] == group_id and data[0]["repo_id"] == repo_id - - def add_user_group(self, user_id:int, group_name:str) -> dict: - """Add a group to the user. - - Args - user_id: id of the user - group_name: name of the group being added - - Returns: - Dict with status key that indicates the success of the operation - - Note: - If group already exists the function will return that it has been added, but a duplicate group isn't added. 
- It simply detects that it already exists and doesn't add it. - """ - - if not isinstance(user_id, int) or not isinstance(group_name, str): - return False, {"status": "Invalid input"} - - user_group_data = { - "name": group_name, - "user_id": user_id - } - - user_group = self.session.query(UserGroup).filter(UserGroup.user_id == user_id, UserGroup.name == group_name).first() - if user_group: - return False, {"status": "Group already exists"} - - try: - result = self.session.insert_data(user_group_data, UserGroup, ["name", "user_id"], return_columns=["group_id"]) - except s.exc.IntegrityError: - return False, {"status": "Error: User id does not exist"} - - - if result: - return True, {"status": "Group created"} - - - return False, {"status": "Error while creating group"} - - def remove_user_group(self, user_id: int, group_name: str) -> dict: - """ Delete a users group of repos. - - Args: - user_id: id of the user - group_name: name of the users group - - Returns: - Dict with a status key that indicates the result of the operation - - """ - - group = self.session.query(UserGroup).filter(UserGroup.name == group_name, UserGroup.user_id == user_id).first() - if not group: - return False, {"status": "WARNING: Trying to delete group that does not exist"} - - # delete rows from user repos with group_id - for repo in group.repos: - self.session.delete(repo) - - # delete group from user groups table - self.session.delete(group) - - self.session.commit() - - return True, {"status": "Group deleted"} - - - def convert_group_name_to_id(self, user_id: int, group_name: str) -> int: - """Convert a users group name to the database group id. - - Args: - user_id: id of the user - group_name: name of the users group - - Returns: - None on failure. The group id on success. - - """ - - if not isinstance(user_id, int) or not isinstance(group_name, str): - return None - - try: - user_group = self.session.query(UserGroup).filter(UserGroup.user_id == user_id, UserGroup.name == group_name).one() - except s.orm.exc.NoResultFound: - return None - - return user_group.group_id - - def get_user_groups(self, user_id: int) -> List: - - return self.session.query(UserGroup).filter(UserGroup.user_id == user_id).all() - - def get_user_group_repos(self, group_id: int) -> List: - user_repos = self.session.query(UserRepo).filter(UserRepo.group_id == group_id).all() - - return [user_repo.repo for user_repo in user_repos] - - - def add_frontend_repo(self, url: List[str], user_id: int, group_name=None, group_id=None, valid_repo=False) -> dict: - """Add list of repos to a users repos. - - Args: - urls: list of repo urls - user_id: id of user_id from users table - group_name: name of group to add repo to. 
- group_id: id of the group - valid_repo: boolean that indicates whether the repo has already been validated - - Note: - Either the group_name or group_id can be passed not both - - Returns: - Dict that contains the key "status" and additional useful data - """ - - if group_name and group_id: - return False, {"status": "Pass only the group name or group id not both"} - - if group_id is None: - - group_id = self.convert_group_name_to_id(user_id, group_name) - if group_id is None: - return False, {"status": "Invalid group name"} - - if not valid_repo: - result = self.is_valid_repo(url) - if not result[0]: - return False, {"status": result[1]["status"], "repo_url": url} - - repo_id = self.add_repo_row(url, DEFAULT_REPO_GROUP_IDS[0], "Frontend") - if not repo_id: - return False, {"status": "Repo insertion failed", "repo_url": url} - - result = self.add_repo_to_user_group(repo_id, group_id) - - if not result: - return False, {"status": "repo_user insertion failed", "repo_url": url} - - return True, {"status": "Repo Added", "repo_url": url} - - def remove_frontend_repo(self, repo_id:int, user_id:int, group_name:str) -> dict: - """ Remove repo from a users group. - - Args: - repo_id: id of the repo to remove - user_id: id of the user - group_name: name of group the repo is being removed from - - Returns: - Dict with a key of status that indicates the result of the operation - """ - - if not isinstance(repo_id, int) or not isinstance(user_id, int) or not isinstance(group_name, str): - return False, {"status": "Invalid types"} - - group_id = self.convert_group_name_to_id(user_id, group_name) - if group_id is None: - return False, {"status": "Invalid group name"} - - # delete rows from user repos with group_id - self.session.query(UserRepo).filter(UserRepo.group_id == group_id, UserRepo.repo_id == repo_id).delete() - self.session.commit() - - return True, {"status": "Repo Removed"} - - - def add_frontend_org(self, url: List[str], user_id: int, group_name: int): - """Add list of orgs and their repos to a users repos. 
- - Args: - urls: list of org urls - user_id: id of user_id from users table - """ - group_id = self.convert_group_name_to_id(user_id, group_name) - if group_id is None: - return False, {"status": "Invalid group name"} - - result = self.retrieve_org_repos(url) - if not result[0]: - return False, result[1] - - repos = result[0] - # try to get the repo group with this org name - # if it does not exist create one - failed_repos = [] - for repo in repos: - - result = self.add_frontend_repo(repo, user_id, group_id=group_id, valid_repo=True) - - # keep track of all the repos that failed - if not result[0]: - failed_repos.append(repo) - - failed_count = len(failed_repos) - if failed_count > 0: - # this should never happen because an org should never return invalid repos - return False, {"status": f"{failed_count} repos failed", "repo_urls": failed_repos, "org_url": url} - - return True, {"status": "Org repos added"} - def add_cli_repo(self, repo_data: Dict[str, Any], valid_repo=False): """Add list of repos to specified repo_groups @@ -450,17 +83,17 @@ def add_cli_repo(self, repo_data: Dict[str, Any], valid_repo=False): url = repo_data["url"] repo_group_id = repo_data["repo_group_id"] - if valid_repo or self.is_valid_repo(url)[0]: + if valid_repo or Repo.is_valid_github_repo(self.session, url)[0]: # if the repo doesn't exist it adds it # if the repo does exist it updates the repo_group_id - repo_id = self.add_repo_row(url, repo_group_id, "CLI") + repo_id = Repo.insert(self.session, url, repo_group_id, "CLI") if not repo_id: logger.warning(f"Invalid repo group id specified for {url}, skipping.") return {"status": f"Invalid repo group id specified for {url}, skipping."} - self.add_repo_to_user_group(repo_id) + UserRepo.insert(self.session, repo_id) def add_cli_org(self, org_name): """Add list of orgs and their repos to specified repo_groups @@ -470,8 +103,7 @@ def add_cli_org(self, org_name): """ url = f"https://github.com/{org_name}" - repos = self.retrieve_org_repos(url) - + repos = retrieve_org_repos(self.session, url)[0] if not repos: print( f"No organization with name {org_name} could be found") @@ -502,28 +134,6 @@ def add_cli_org(self, org_name): return {"status": "Org added"} - def get_user_repo_ids(self, user_id: int) -> List[int]: - """Retrieve a list of repos_id for the given user_id. - - Args: - user_id: id of the user - - Returns: - list of repo ids - """ - - user_groups = self.session.query(UserGroup).filter(UserGroup.user_id == user_id).all() - - all_repo_ids = set() - for group in user_groups: - - repo_ids = [user_repo.repo.repo_id for user_repo in group.repos] - all_repo_ids.update(repo_ids) - - - return list(all_repo_ids) - - def paginate_repos(self, source, page=0, page_size=25, sort="repo_id", direction="ASC", **kwargs): if not source: @@ -609,6 +219,7 @@ def get_repo_count(self, source, **kwargs): return result[0]["count"], {"status": "success"} + return query, {"status": "success"} def generate_repo_query(self, source, count, **kwargs): @@ -651,24 +262,20 @@ def generate_repo_query(self, source, count, **kwargs): elif source == "group": - with GithubTaskSession(logger) as session: - - controller = RepoLoadController(session) - - user = kwargs.get("user") - if not user: - print("Func: generate_repo_query. Error: User not specified") - return None, {"status": "User not specified"} + user = kwargs.get("user") + if not user: + print("Func: generate_repo_query. 
Error: User not specified") + return None, {"status": "User not specified"} - group_name = kwargs.get("group_name") - if not group_name: - print("Func: generate_repo_query. Error: Group name not specified") - return None, {"status": "Group name not specified"} + group_name = kwargs.get("group_name") + if not group_name: + print("Func: generate_repo_query. Error: Group name not specified") + return None, {"status": "Group name not specified"} - group_id = controller.convert_group_name_to_id(user.user_id, group_name) - if group_id is None: - print("Func: generate_repo_query. Error: Group does not exist") - return None, {"status": "Group does not exists"} + group_id = UserGroup.convert_group_name_to_id(self.session, user.user_id, group_name) + if group_id is None: + print("Func: generate_repo_query. Error: Group does not exist") + return None, {"status": "Group does not exists"} query += "\t\t JOIN augur_operations.user_repos ON augur_data.repo.repo_id = augur_operations.user_repos.repo_id\n" query += f"\t\t WHERE augur_operations.user_repos.group_id = {group_id}\n" diff --git a/tests/test_applicaton/test_cli/test_add_cli_repos.py b/tests/test_applicaton/test_cli/test_add_cli_repos.py new file mode 100644 index 0000000000..42f342d2bd --- /dev/null +++ b/tests/test_applicaton/test_cli/test_add_cli_repos.py @@ -0,0 +1,140 @@ +import pytest +import logging + +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.tasks.github.util.github_task_session import GithubTaskSession +from augur.util.repo_load_controller import RepoLoadController, CLI_USER_ID + +DEFAULT_REPO_GROUP_ID = 1 +VALID_ORG = {"org": "CDCgov", "repo_count": 249} + + + +logger = logging.getLogger(__name__) + + +def test_add_cli_repos_with_invalid_repo_group_id(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": "operate-first", "repo_name": "operate-first-twitter", "user_group_name": "test_group", "user_group_id": 1} + url = f"https://github.com/{data['org_name']}/{data['repo_name']}" + + query_statements = [] + query_statements.append(clear_tables_statement) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + repo_data = {"url": url, "repo_group_id": 5} + + controller = RepoLoadController(session) + result = controller.add_cli_repo(repo_data) + assert result["status"] == f"Invalid repo group id specified for {repo_data['url']}, skipping." 
+ + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_cli_repos_with_duplicates(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": "operate-first", "repo_name": "operate-first-twitter", "user_group_name": "test_group", "user_group_id": 1} + url = f"https://github.com/{data['org_name']}/{data['repo_name']}" + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + repo_data = {"url": url, "repo_group_id": data["repo_group_id"]} + + controller = RepoLoadController(session) + controller.add_cli_repo(repo_data) + controller.add_cli_repo(repo_data) + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + + assert result is not None + assert len(result) == 1 + assert dict(result[0])["repo_git"] == url + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_cli_org_with_valid_org(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": VALID_ORG["org"], "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + repo_count = None + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + controller = RepoLoadController(session) + + result = controller.add_cli_org(data["org_name"]) + + assert result["status"] == "Org added" + + result2 = controller.add_cli_org("Invalid org") + assert result2["status"] == "No organization found" + + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == VALID_ORG["repo_count"] + + user_repo_result = get_user_repos(connection) + assert user_repo_result is not None + assert len(user_repo_result) == VALID_ORG["repo_count"] + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + + + diff --git a/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py b/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py new file mode 100644 index 0000000000..0a1bd4ceb2 --- /dev/null +++ b/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py @@ -0,0 +1,142 @@ + 
+import logging +import pytest +import sqlalchemy as s + +from augur.application.db.session import DatabaseSession +from augur.tasks.github.util.github_task_session import GithubTaskSession +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.application.db.models import Repo +logger = logging.getLogger(__name__) + + +def test_parse_github_repo_url(): + + with DatabaseSession(logger) as session: + + assert Repo.parse_github_repo_url("hello world") == (None, None) + assert Repo.parse_github_repo_url("https://github.com/chaoss/hello") == ("chaoss", "hello") + assert Repo.parse_github_repo_url("https://github.com/hello124/augur") == ("hello124", "augur") + assert Repo.parse_github_repo_url("https://github.com//augur") == (None, None) + assert Repo.parse_github_repo_url("https://github.com/chaoss/") == (None, None) + assert Repo.parse_github_repo_url("https://github.com//") == (None, None) + assert Repo.parse_github_repo_url("https://github.com/chaoss/augur") == ("chaoss", "augur") + assert Repo.parse_github_repo_url("https://github.com/chaoss/augur/") == ("chaoss", "augur") + assert Repo.parse_github_repo_url("https://github.com/chaoss/augur.git") == ("chaoss", "augur") + +def test_parse_github_org_url(): + + with DatabaseSession(logger) as session: + + assert Repo.parse_github_org_url("hello world") == None, None + assert Repo.parse_github_org_url("https://github.com/chaoss/") == "chaoss" + assert Repo.parse_github_org_url("https://github.com/chaoss") == "chaoss" + assert Repo.parse_github_org_url("https://github.com/hello124/augur") == None + assert Repo.parse_github_org_url("https://github.com//augur") == None, None + assert Repo.parse_github_org_url("https://github.com//") == None + assert Repo.parse_github_org_url("https://github.com/chaoss/augur") == None + + +def test_is_valid_github_repo(): + + with GithubTaskSession(logger) as session: + + assert Repo.is_valid_github_repo(session, "hello world")[0] is False + assert Repo.is_valid_github_repo(session, "https://github.com/chaoss/hello")[0] is False + assert Repo.is_valid_github_repo(session, "https://github.com/hello124/augur")[0] is False + assert Repo.is_valid_github_repo(session, "https://github.com//augur")[0] is False + assert Repo.is_valid_github_repo(session, "https://github.com/chaoss/")[0] is False + assert Repo.is_valid_github_repo(session, "https://github.com//")[0] is False + assert Repo.is_valid_github_repo(session, "https://github.com/chaoss/augur")[0] is True + assert Repo.is_valid_github_repo(session, "https://github.com/chaoss/augur/")[0] is True + assert Repo.is_valid_github_repo(session, "https://github.com/chaoss/augur.git")[0] is True + assert Repo.is_valid_github_repo(session, "https://github.com/chaoss/augur/")[0] is True + + + +def test_insert_repo(test_db_engine): + + clear_tables = ["repo", "repo_groups"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + data = {"rg_id": 1, + "tool_source": "Frontend", + "repo_urls": ["https://github.com/chaoss/augur", "https://github.com/chaoss/grimoirelab-sortinghat"] + } + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["rg_id"])) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + assert Repo.insert(session, data["repo_urls"][0], data["rg_id"], data["tool_source"]) is not None + assert 
Repo.insert(session, data["repo_urls"][1], data["rg_id"], data["tool_source"]) is not None + + # invalid rg_id + assert Repo.insert(session, data["repo_urls"][0], 12, data["tool_source"]) is None + + # invalid type for repo url + assert Repo.insert(session, 1, data["rg_id"], data["tool_source"]) is None + + # invalid type for rg_id + assert Repo.insert(session, data["repo_urls"][1], "1", data["tool_source"]) is None + + # invalid type for tool_source + assert Repo.insert(session, data["repo_urls"][1], data["rg_id"], 52) is None + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == len(data["repo_urls"]) + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_repo_row_with_updates(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + data = {"old_rg_id": 1, "new_rg_id": 2, "repo_id": 1, "repo_id_2": 2, "tool_source": "Test", + "repo_url": "https://github.com/chaoss/augur", "repo_url_2": "https://github.com/chaoss/grimoirelab-perceval-opnfv", "repo_status": "Complete"} + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["old_rg_id"])) + query_statements.append(get_repo_group_insert_statement(data["new_rg_id"])) + query_statements.append(get_repo_insert_statement(data["repo_id"], data["old_rg_id"], repo_url=data["repo_url"], repo_status=data["repo_status"])) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + result = Repo.insert(session, data["repo_url"], data["new_rg_id"], data["tool_source"]) is not None + assert result == data["repo_id"] + + with test_db_engine.connect() as connection: + + result = get_repos(connection, where_string=f"WHERE repo_git='{data['repo_url']}'") + assert result is not None + assert len(result) == 1 + + value = dict(result[0]) + assert value["repo_status"] == data["repo_status"] + assert value["repo_group_id"] == data["new_rg_id"] + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) diff --git a/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo_group.py b/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo_group.py new file mode 100644 index 0000000000..4367542db8 --- /dev/null +++ b/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo_group.py @@ -0,0 +1,49 @@ +import logging +import pytest +import sqlalchemy as s + +from augur.application.db.session import DatabaseSession +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.application.db.models import RepoGroup + +logger = logging.getLogger(__name__) + + +def test_is_valid_repo_group_id(test_db_engine): + + clear_tables = ["repo_groups"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + + data = {"rg_ids": [1, 2, 3], "repo_id": 1, "tool_source": "Frontend", + "repo_url": "https://github.com/chaoss/augur"} + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["rg_ids"][0])) + 
query_statements.append(get_repo_group_insert_statement(data["rg_ids"][1])) + query_statements.append(get_repo_group_insert_statement(data["rg_ids"][2])) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + # valid + assert RepoGroup.is_valid_repo_group_id(session, data["rg_ids"][0]) is True + assert RepoGroup.is_valid_repo_group_id(session, data["rg_ids"][1]) is True + assert RepoGroup.is_valid_repo_group_id(session, data["rg_ids"][2]) is True + + # invalid + assert RepoGroup.is_valid_repo_group_id(session, -1) is False + assert RepoGroup.is_valid_repo_group_id(session, 12) is False + assert RepoGroup.is_valid_repo_group_id(session, 11111) is False + + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + diff --git a/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user.py b/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user.py new file mode 100644 index 0000000000..6011405b26 --- /dev/null +++ b/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user.py @@ -0,0 +1,173 @@ +import logging +import pytest +import sqlalchemy as s +from werkzeug.security import check_password_hash + +from augur.application.db.session import DatabaseSession +from augur.tasks.github.util.github_task_session import GithubTaskSession +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.application.db.models import User + + +logger = logging.getLogger(__name__) + + +def test_get_user(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + username = "user" + user_id = 1 + email = f"{username}@gmail.com" + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id, username, email)) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + # invalid type + assert User.get_user(session, 123) is None + + # invalid user + assert User.get_user(session, "BestUser") is None + + # valid user + user = User.get_user(session, username) + assert user.user_id == user_id + assert user.login_name == username + assert user.email == email + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + +def test_delete_user(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user1 = {"user_id": 3, "user_name": "bare_user", "email": "bare_user@gmail.com"} + user2 = { + "user_id": 2, + "user_name": "groups_user", + "email": "groups_user@gmail.com", + "group": { + "group_id": 2, + "group_name": "second_group" + } + } + user3 = { + "user_id": 1, + "user_name": "full_user", + "email": "full_user@gmail.com", + "group": { + "group_name": "group", + "group_id": 1, + "repo": { + "repo_id": 1, + "repo_group_id": 1 + } + } + } + + query_statements = [] + query_statements.append(clear_tables_statement) + + # create bare user + query_statements.append(get_user_insert_statement(user1["user_id"], user1["user_name"], user1["email"])) + + # # create user with groups + 
query_statements.append(get_user_insert_statement(user2["user_id"], user2["user_name"], user2["email"])) + query_statements.append(get_user_group_insert_statement(user2["user_id"], user2["group"]["group_name"])) + + # # create user with groups and repos + query_statements.append(get_repo_group_insert_statement(user3["group"]["repo"]["repo_group_id"])) + query_statements.append(get_repo_insert_statement(user3["group"]["repo"]["repo_id"], user3["group"]["repo"]["repo_group_id"])) + query_statements.append(get_user_insert_statement(user3["user_id"], user3["user_name"], user3["email"])) + query_statements.append(get_user_group_insert_statement(user3["user_id"], user3["group"]["group_name"], user3["group"]["group_id"])) + query_statements.append(get_user_repo_insert_statement(user3["group"]["repo"]["repo_id"], user3["group"]["group_id"])) + + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + # delete user with no groups or repos + user1_obj = session.query(User).filter(User.user_id == user1["user_id"]).first() + assert user1_obj.delete(session)[0] is True + + # # delete user with groups, but no repos + user2_obj = session.query(User).filter(User.user_id == user2["user_id"]).first() + assert user2_obj.delete(session)[0] is True + + # # delete user with groups and repos + user3_obj = session.query(User).filter(User.user_id == user3["user_id"]).first() + assert user3_obj.delete(session)[0] is True + + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + +def test_update_user_password(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + username = "user" + user_id = 1 + email = f"{username}@gmail.com" + password = "pass" + new_password = "be++erp@ssw0rd" + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id, username, email, password)) + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + user = session.query(User).filter(User.user_id == 1).first() + + # invalid passowrd + assert user.update_password(session, "wrong passowrd", new_password)[0] is False + + # invalid types + assert user.update_password(session, 1, new_password)[0] is False + assert user.update_password(session, password, 1)[0] is False + + # invalid passowrd + assert user.update_password(session, password, new_password)[0] is True + + with DatabaseSession(logger, test_db_engine) as session: + + user = session.query(User).filter(User.user_id == 1).first() + assert check_password_hash(user.login_hashword, new_password) + + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + diff --git a/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_group.py b/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_group.py new file mode 100644 index 0000000000..70fa11ecb0 --- /dev/null +++ b/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_group.py @@ -0,0 +1,237 @@ +import logging +import pytest +import sqlalchemy as s + +from augur.application.db.session import DatabaseSession +from tests.test_applicaton.test_repo_load_controller.helper import 
* +from augur.application.db.models import UserGroup + +logger = logging.getLogger(__name__) + + +def test_add_user_group(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = { + "users": [ + { + "id": 0, + "username": "user 1", + "email": "email 1" + }, + { + "id": 1, + "username": "user 2", + "email": "email 2" + } + ], + "group_names": ["test_group", "test_group_2"]} + + query_statements = [] + query_statements.append(clear_tables_statement) + + for user in data["users"]: + query_statements.append(get_user_insert_statement(user["id"], user["username"], user["email"])) + + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + # add valid group to user 0 + assert UserGroup.insert(session, data["users"][0]["id"], data["group_names"][0])[0] is True + + # add group again to user 0 ... should be 1 group row still + assert UserGroup.insert(session, data["users"][0]["id"], data["group_names"][0])[0] is False + + # add another valid group to user 0 + assert UserGroup.insert(session, data["users"][0]["id"], data["group_names"][1])[0] is True + + # add same group to user 1 + assert UserGroup.insert(session, data["users"][1]["id"], data["group_names"][0])[0] is True + + + # add with invalid user id + assert UserGroup.insert(session, 130000, data["group_names"][0])[0] is False + + # pass invalid tpyes + assert UserGroup.insert(session, "130000", data["group_names"][0])[0] is False + assert UserGroup.insert(session, data["users"][0]["id"], 133333)[0] is False + + + # end result + # 3 groups in table + # 1 row for user 1 + # 2 rows for user 0 + + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 3 + + query = s.text("""SELECT * FROM "augur_operations"."user_groups" WHERE "user_id"={};""".format(data["users"][0]["id"])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 2 + + query = s.text("""SELECT * FROM "augur_operations"."user_groups" WHERE "user_id"={};""".format(data["users"][1]["id"])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 1 + + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + +def test_convert_group_name_to_id(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user_id =1 + + groups = [ + { + "group_name": "test group 1", + "group_id": 1 + }, + { + "group_name": "test group 2", + "group_id": 2 + }, + { + "group_name": "test group 3", + "group_id": 3 + }, + ] + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id)) + + for group in groups: + query_statements.append(get_user_group_insert_statement(user_id, group["group_name"], group["group_id"])) + + connection.execute("".join(query_statements)) + + with DatabaseSession(logger, test_db_engine) as session: + + for group in groups: + assert 
UserGroup.convert_group_name_to_id(session, user_id, group["group_name"]) == group["group_id"] + + # test invalid group name + assert UserGroup.convert_group_name_to_id(session, user_id, "hello") is None + + # test invalid user id + assert UserGroup.convert_group_name_to_id(session, user_id*2, groups[0]["group_name"]) is None + + # test invalid types + assert UserGroup.convert_group_name_to_id(session, user_id, 5) is None + assert UserGroup.convert_group_name_to_id(session, "5", groups[0]["group_name"]) is None + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_remove_user_group(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + user_id =1 + repo_id = 1 + rg_id = 1 + + groups = [ + { + "group_name": "test group 1", + "group_id": 1 + }, + { + "group_name": "test group 2", + "group_id": 2 + }, + { + "group_name": "test group 3", + "group_id": 3 + }, + { + "group_name": "test group 4", + "group_id": 4 + }, + { + "group_name": "test group 5", + "group_id": 5 + } + ] + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_user_insert_statement(user_id)) + + for group in groups: + query_statements.append(get_user_group_insert_statement(user_id, group["group_name"], group["group_id"])) + + query_statements.append(get_repo_group_insert_statement(rg_id)) + query_statements.append(get_repo_insert_statement(repo_id, rg_id)) + query_statements.append(get_user_repo_insert_statement(repo_id, groups[0]["group_id"])) + + connection.execute("".join(query_statements)) + + with DatabaseSession(logger, test_db_engine) as session: + + # try to delete group that doesn't exist + assert UserGroup.delete(session, user_id, "hello")[0] is False + + i = 0 + while(i < len(groups)-2): + assert UserGroup.delete(session, user_id, groups[i]["group_name"])[0] is True + i += 1 + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == len(groups)-i + + + while(i < len(groups)): + + assert UserGroup.delete(session, user_id, groups[i]["group_name"])[0] is True + i += 1 + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) diff --git a/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_repo.py b/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_repo.py new file mode 100644 index 0000000000..3fc5451791 --- /dev/null +++ b/tests/test_applicaton/test_db/test_models/test_augur_operations/test_user_repo.py @@ -0,0 +1,323 @@ +import logging +import pytest +import sqlalchemy as s + + +from augur.application.db.session import DatabaseSession +from augur.tasks.github.util.github_task_session import GithubTaskSession +from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.application.db.models import UserRepo + +logger = logging.getLogger(__name__) +VALID_ORG = {"org": "CDCgov", "repo_count": 249} +DEFAULT_REPO_GROUP_ID = 1 + + +def 
test_add_repo_to_user_group(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"repo_ids": [1, 2, 3], "repo_urls":["url 1", "url2", "url3"], "user_id": 2, "user_repo_group_id": 1, "user_group_ids": [1, 2], "user_group_names": ["test_group", "test_group_2"]} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["user_repo_group_id"])) + + for i in range(0, len(data["repo_ids"])): + query_statements.append(get_repo_insert_statement(data["repo_ids"][i], data["user_repo_group_id"], data["repo_urls"][i])) + + query_statements.append(get_user_insert_statement(data["user_id"])) + + for i in range(0, len(data["user_group_ids"])): + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_names"][i], data["user_group_ids"][i])) + + query = s.text("".join(query_statements)) + + connection.execute(query) + + with DatabaseSession(logger, test_db_engine) as session: + + # add valid repo to group 0 + assert UserRepo.insert(session, data["repo_ids"][0], data["user_group_ids"][0]) is True + + # add repo again to group 0 ... should be 1 repo row still + assert UserRepo.insert(session, data["repo_ids"][0], data["user_group_ids"][0]) is True + + # add another valid repo to group 0 + assert UserRepo.insert(session, data["repo_ids"][1], data["user_group_ids"][0]) is True + + # add same repo to group 1 + assert UserRepo.insert(session, data["repo_ids"][0], data["user_group_ids"][1]) is True + + # add different repo to group 1 + assert UserRepo.insert(session, data["repo_ids"][2], data["user_group_ids"][1]) is True + + # add with invalid repo id + assert UserRepo.insert(session, 130000, data["user_group_ids"][1]) is False + + # add with invalid group_id + assert UserRepo.insert(session, data["repo_ids"][0], 133333) is False + + # pass invalid tpyes + assert UserRepo.insert(session, "130000", data["user_group_ids"][1]) is False + assert UserRepo.insert(session, data["repo_ids"][0], "133333") is False + + + # end result + # 4 rows in table + # 2 rows in each group + + + with test_db_engine.connect() as connection: + + query = s.text("""SELECT * FROM "augur_operations"."user_repos";""") + # WHERE "group_id"=:user_group_id AND "repo_id"=:repo_id + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 4 + + + query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "group_id"={};""".format(data["user_group_ids"][0])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 2 + + + query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "group_id"={};""".format(data["user_group_ids"][0])) + + result = connection.execute(query).fetchall() + assert result is not None + assert len(result) == 2 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_frontend_repos_with_invalid_repo(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + url = "https://github.com/chaoss/whitepaper" + + data = {"user_id": 2, "repo_group_id": 5, "user_group_name": 
"test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + result = UserRepo.add(session, url, data["user_id"], data["user_group_name"]) + + assert result[1]["status"] == "Invalid repo" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_add_frontend_repos_with_duplicates(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + url = "https://github.com/operate-first/operate-first-twitter" + + data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_ID, "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + result = UserRepo.add(session, url, data["user_id"], data["user_group_name"]) + result2 = UserRepo.add(session, url, data["user_id"], data["user_group_name"]) + + # add repo with invalid group name + result3 = UserRepo.add(session, url, data["user_id"], "Invalid group name") + + assert result[1]["status"] == "Repo Added" + assert result2[1]["status"] == "Repo Added" + assert result3[1]["status"] == "Invalid group name" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == 1 + assert dict(result[0])["repo_git"] == url + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + +def test_remove_frontend_repo(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + url = "https://github.com/operate-first/operate-first-twitter" + + data = {"user_id": 2, "repo_id": 5, "repo_group_id": DEFAULT_REPO_GROUP_ID, "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + query_statements.append(get_repo_insert_statement(data["repo_id"], data["repo_group_id"], repo_url="url")) + 
query_statements.append(get_user_repo_insert_statement(data["repo_id"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + # remove valid user repo + result = UserRepo.delete(session, data["repo_id"], data["user_id"], data["user_group_name"]) + assert result[1]["status"] == "Repo Removed" + + with test_db_engine.connect() as connection: + + repos = get_user_repos(connection) + assert len(repos) == 0 + + # remove invalid group + result = UserRepo.delete(session, data["repo_id"], data["user_id"], "invalid group") + assert result[1]["status"] == "Invalid group name" + + # pass invalid data types + result = UserRepo.delete(session, "5", data["user_id"], data["user_group_name"]) + assert result[1]["status"] == "Invalid types" + + result = UserRepo.delete(session, data["repo_id"], "1", data["user_group_name"]) + assert result[1]["status"] == "Invalid types" + + result = UserRepo.delete(session, data["repo_id"], data["user_id"], 5) + assert result[1]["status"] == "Invalid types" + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + +def test_add_frontend_org_with_invalid_org(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + + data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_ID, "org_name": "chaosssss", "user_group_name": "test_group", "user_group_id": 1} + + with test_db_engine.connect() as connection: + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + with GithubTaskSession(logger, test_db_engine) as session: + + url = f"https://github.com/{data['org_name']}/" + result = UserRepo.add_org_repos(session, url, data["user_id"], data["user_group_name"]) + assert result[1]["status"] == "Invalid owner url" + + # test with invalid group name + result = UserRepo.add_org_repos(session, url, data["user_id"], "Invalid group name") + assert result[1]["status"] == "Invalid group name" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == 0 + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) + + + +def test_add_frontend_org_with_valid_org(test_db_engine): + + clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] + clear_tables_statement = get_repo_related_delete_statements(clear_tables) + + try: + with test_db_engine.connect() as connection: + + data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_ID, "org_name": VALID_ORG["org"], "user_group_name": "test_group", "user_group_id": 1} + + query_statements = [] + query_statements.append(clear_tables_statement) + query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) + query_statements.append(get_user_insert_statement(data["user_id"])) + query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], 
data["user_group_id"])) + + connection.execute("".join(query_statements)) + + add_keys_to_test_db(test_db_engine) + + with GithubTaskSession(logger, test_db_engine) as session: + + url = "https://github.com/{}/".format(data["org_name"]) + result = UserRepo.add_org_repos(session, url, data["user_id"], data["user_group_name"]) + assert result[1]["status"] == "Org repos added" + + with test_db_engine.connect() as connection: + + result = get_repos(connection) + assert result is not None + assert len(result) == VALID_ORG["repo_count"] + + user_repo_result = get_user_repos(connection) + assert user_repo_result is not None + assert len(user_repo_result) == VALID_ORG["repo_count"] + + finally: + with test_db_engine.connect() as connection: + connection.execute(clear_tables_statement) diff --git a/tests/test_applicaton/test_repo_load_controller/helper.py b/tests/test_applicaton/test_repo_load_controller/helper.py index b05be747a7..640819eb2c 100644 --- a/tests/test_applicaton/test_repo_load_controller/helper.py +++ b/tests/test_applicaton/test_repo_load_controller/helper.py @@ -7,6 +7,8 @@ from augur.application.db.models import Config from augur.tasks.github.util.github_paginator import hit_api from augur.application.db.util import execute_session_query +from werkzeug.security import generate_password_hash + logger = logging.getLogger(__name__) @@ -101,16 +103,16 @@ def get_repo_group_insert_statement(rg_id): return """INSERT INTO "augur_data"."repo_groups" ("repo_group_id", "rg_name", "rg_description", "rg_website", "rg_recache", "rg_last_modified", "rg_type", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, 'Default Repo Group', 'The default repo group created by the schema generation script', '', 0, '2019-06-03 15:55:20', 'GitHub Organization', 'load', 'one', 'git', '2019-06-05 13:36:25');""".format(rg_id) -def get_user_insert_statement(user_id, username="bil", email="default@gmail.com"): +def get_user_insert_statement(user_id, username="bil", email="default@gmail.com", password="pass"): - return """INSERT INTO "augur_operations"."users" ("user_id", "login_name", "login_hashword", "email", "first_name", "last_name", "admin") VALUES ({}, '{}', 'pass', '{}', 'bill', 'bob', false);""".format(user_id, username, email) + return """INSERT INTO "augur_operations"."users" ("user_id", "login_name", "login_hashword", "email", "first_name", "last_name", "admin") VALUES ({}, '{}', '{}', '{}', 'bill', 'bob', false);""".format(user_id, username, generate_password_hash(password), email) def get_user_group_insert_statement(user_id, group_name, group_id=None): if group_id: return """INSERT INTO "augur_operations"."user_groups" ("group_id", "user_id", "name") VALUES ({}, {}, '{}');""".format(group_id, user_id, group_name) - return """INSERT INTO "augur_operations"."user_groups" (user_id", "name") VALUES (1, 'default');""".format(user_id, group_name) + return """INSERT INTO "augur_operations"."user_groups" ("user_id", "name") VALUES ({}, '{}');""".format(user_id, group_name) ######## Helper Functions to get retrieve data from tables ################# diff --git a/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py b/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py index 8e9b104b38..1b8effe568 100644 --- a/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py +++ b/tests/test_applicaton/test_repo_load_controller/test_adding_orgs.py @@ -5,144 +5,9 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from 
augur.util.repo_load_controller import RepoLoadController, DEFAULT_REPO_GROUP_IDS, CLI_USER_ID +from augur.application.db.models import UserRepo logger = logging.getLogger(__name__) -VALID_ORG = {"org": "CDCgov", "repo_count": 246} - - -def test_add_frontend_org_with_invalid_org(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - - data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "org_name": "chaosssss", "user_group_name": "test_group", "user_group_id": 1} - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) - - connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - with GithubTaskSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - url = f"https://github.com/{data['org_name']}/" - result = controller.add_frontend_org(url, data["user_id"], data["user_group_name"]) - assert result["status"] == "Invalid org" - - # test with invalid group name - result = controller.add_frontend_org(url, data["user_id"], "Invalid group name") - assert result["status"] == "Invalid group name" - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == 0 - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_frontend_org_with_valid_org(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "org_name": VALID_ORG["org"], "user_group_name": "test_group", "user_group_id": 1} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) - - connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - url = "https://github.com/{}/".format(data["org_name"]) - result = RepoLoadController(session).add_frontend_org(url, data["user_id"], data["user_group_name"]) - assert result["status"] == "Org repos added" - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == VALID_ORG["repo_count"] - - user_repo_result = get_user_repos(connection) - assert user_repo_result is not None - assert len(user_repo_result) == VALID_ORG["repo_count"] - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_cli_org_with_valid_org(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users", "config"] - clear_tables_statement = 
get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": VALID_ORG["org"], "user_group_name": "test_group", "user_group_id": 1} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_name"], data["user_group_id"])) - - connection.execute("".join(query_statements)) - - repo_count = None - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - result = controller.add_cli_org(data["org_name"]) - - assert result["status"] == "Org added" - - result2 = controller.add_cli_org("Invalid org") - assert result2["status"] == "No organization found" - - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == VALID_ORG["repo_count"] - - user_repo_result = get_user_repos(connection) - assert user_repo_result is not None - assert len(user_repo_result) == VALID_ORG["repo_count"] - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - diff --git a/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py b/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py index 9034b42a84..ab9222a920 100644 --- a/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py +++ b/tests/test_applicaton/test_repo_load_controller/test_helper_functions.py @@ -3,723 +3,221 @@ import sqlalchemy as s -from augur.util.repo_load_controller import RepoLoadController, DEFAULT_REPO_GROUP_IDS, CLI_USER_ID - from augur.application.db.session import DatabaseSession from augur.tasks.github.util.github_task_session import GithubTaskSession from tests.test_applicaton.test_repo_load_controller.helper import * +from augur.application.db.models import Repo, RepoGroup, UserRepo, UserGroup logger = logging.getLogger(__name__) -def test_parse_repo_url(): - - with DatabaseSession(logger) as session: - - controller = RepoLoadController(session) - - assert controller.parse_repo_url("hello world") == (None, None) - assert controller.parse_repo_url("https://github.com/chaoss/hello") == ("chaoss", "hello") - assert controller.parse_repo_url("https://github.com/hello124/augur") == ("hello124", "augur") - assert controller.parse_repo_url("https://github.com//augur") == (None, None) - assert controller.parse_repo_url("https://github.com/chaoss/") == (None, None) - assert controller.parse_repo_url("https://github.com//") == (None, None) - assert controller.parse_repo_url("https://github.com/chaoss/augur") == ("chaoss", "augur") - assert controller.parse_repo_url("https://github.com/chaoss/augur/") == ("chaoss", "augur") - assert controller.parse_repo_url("https://github.com/chaoss/augur.git") == ("chaoss", "augur") - - -def test_parse_org_url(): - - with DatabaseSession(logger) as session: - - controller = RepoLoadController(session) - - assert controller.parse_org_url("hello world") == None, None - assert controller.parse_org_url("https://github.com/chaoss/") == "chaoss" - assert controller.parse_org_url("https://github.com/chaoss") == "chaoss" - assert 
controller.parse_org_url("https://github.com/hello124/augur") == None - assert controller.parse_org_url("https://github.com//augur") == None, None - assert controller.parse_org_url("https://github.com//") == None - assert controller.parse_org_url("https://github.com/chaoss/augur") == None - - -def test_is_valid_repo(): - - with GithubTaskSession(logger) as session: - - controller = RepoLoadController(session) - - assert controller.is_valid_repo("hello world") is False - assert controller.is_valid_repo("https://github.com/chaoss/hello") is False - assert controller.is_valid_repo("https://github.com/hello124/augur") is False - assert controller.is_valid_repo("https://github.com//augur") is False - assert controller.is_valid_repo("https://github.com/chaoss/") is False - assert controller.is_valid_repo("https://github.com//") is False - assert controller.is_valid_repo("https://github.com/chaoss/augur") is True - assert controller.is_valid_repo("https://github.com/chaoss/augur/") is True - assert controller.is_valid_repo("https://github.com/chaoss/augur.git") is True - assert controller.is_valid_repo("https://github.com/chaoss/augur/") is True - -def test_is_valid_repo_group_id(test_db_engine): - - clear_tables = ["repo_groups"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - - - data = {"rg_ids": [1, 2, 3], "repo_id": 1, "tool_source": "Frontend", - "repo_url": "https://github.com/chaoss/augur"} - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["rg_ids"][0])) - query_statements.append(get_repo_group_insert_statement(data["rg_ids"][1])) - query_statements.append(get_repo_group_insert_statement(data["rg_ids"][2])) - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - # valid - assert controller.is_valid_repo_group_id(data["rg_ids"][0]) is True - assert controller.is_valid_repo_group_id(data["rg_ids"][1]) is True - assert controller.is_valid_repo_group_id(data["rg_ids"][2]) is True - - - # invalid - assert controller.is_valid_repo_group_id(-1) is False - assert controller.is_valid_repo_group_id(12) is False - assert controller.is_valid_repo_group_id(11111) is False - - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_repo_row(test_db_engine): - - clear_tables = ["repo", "repo_groups"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - data = {"rg_id": 1, - "tool_source": "Frontend", - "repo_urls": ["https://github.com/chaoss/augur", "https://github.com/chaoss/grimoirelab-sortinghat"] - } - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["rg_id"])) - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - assert RepoLoadController(session).add_repo_row(data["repo_urls"][0], data["rg_id"], data["tool_source"]) is not None - assert RepoLoadController(session).add_repo_row(data["repo_urls"][1], data["rg_id"], data["tool_source"]) is not None - - # invalid rg_id - assert RepoLoadController(session).add_repo_row(data["repo_urls"][0], 12, data["tool_source"]) is None - - # invalid 
type for repo url - assert RepoLoadController(session).add_repo_row(1, data["rg_id"], data["tool_source"]) is None - - # invalid type for rg_id - assert RepoLoadController(session).add_repo_row(data["repo_urls"][1], "1", data["tool_source"]) is None - - # invalid type for tool_source - assert RepoLoadController(session).add_repo_row(data["repo_urls"][1], data["rg_id"], 52) is None - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == len(data["repo_urls"]) - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_repo_row_with_updates(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - data = {"old_rg_id": 1, "new_rg_id": 2, "repo_id": 1, "repo_id_2": 2, "tool_source": "Test", - "repo_url": "https://github.com/chaoss/augur", "repo_url_2": "https://github.com/chaoss/grimoirelab-perceval-opnfv", "repo_status": "Complete"} - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["old_rg_id"])) - query_statements.append(get_repo_group_insert_statement(data["new_rg_id"])) - query_statements.append(get_repo_insert_statement(data["repo_id"], data["old_rg_id"], repo_url=data["repo_url"], repo_status=data["repo_status"])) - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - result = RepoLoadController(session).add_repo_row(data["repo_url"], data["new_rg_id"], data["tool_source"]) is not None - assert result == data["repo_id"] - - with test_db_engine.connect() as connection: - - result = get_repos(connection, where_string=f"WHERE repo_git='{data['repo_url']}'") - assert result is not None - assert len(result) == 1 - - value = dict(result[0]) - assert value["repo_status"] == data["repo_status"] - assert value["repo_group_id"] == data["new_rg_id"] - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_repo_to_user_group(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"repo_ids": [1, 2, 3], "repo_urls":["url 1", "url2", "url3"], "user_id": 2, "user_repo_group_id": 1, "user_group_ids": [1, 2], "user_group_names": ["test_group", "test_group_2"]} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["user_repo_group_id"])) - - for i in range(0, len(data["repo_ids"])): - query_statements.append(get_repo_insert_statement(data["repo_ids"][i], data["user_repo_group_id"], data["repo_urls"][i])) - - query_statements.append(get_user_insert_statement(data["user_id"])) - - for i in range(0, len(data["user_group_ids"])): - query_statements.append(get_user_group_insert_statement(data["user_id"], data["user_group_names"][i], data["user_group_ids"][i])) - - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - # add valid repo to group 0 - assert 
controller.add_repo_to_user_group(data["repo_ids"][0], data["user_group_ids"][0]) is True - - # add repo again to group 0 ... should be 1 repo row still - assert controller.add_repo_to_user_group(data["repo_ids"][0], data["user_group_ids"][0]) is True - - # add another valid repo to group 0 - assert controller.add_repo_to_user_group(data["repo_ids"][1], data["user_group_ids"][0]) is True - - # add same repo to group 1 - assert controller.add_repo_to_user_group(data["repo_ids"][0], data["user_group_ids"][1]) is True - - # add different repo to group 1 - assert controller.add_repo_to_user_group(data["repo_ids"][2], data["user_group_ids"][1]) is True - - # add with invalid repo id - assert controller.add_repo_to_user_group(130000, data["user_group_ids"][1]) is False - - # add with invalid group_id - assert controller.add_repo_to_user_group(data["repo_ids"][0], 133333) is False - - # pass invalid tpyes - assert controller.add_repo_to_user_group("130000", data["user_group_ids"][1]) is False - assert controller.add_repo_to_user_group(data["repo_ids"][0], "133333") is False - - - # end result - # 4 rows in table - # 2 rows in each group - - - with test_db_engine.connect() as connection: - - query = s.text("""SELECT * FROM "augur_operations"."user_repos";""") - # WHERE "group_id"=:user_group_id AND "repo_id"=:repo_id - - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 4 - query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "group_id"={};""".format(data["user_group_ids"][0])) - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 2 +# def test_get_user_groups(test_db_engine): - query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "group_id"={};""".format(data["user_group_ids"][0])) +# clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] +# clear_tables_statement = get_repo_related_delete_statements(clear_tables) - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 2 +# try: +# with test_db_engine.connect() as connection: - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_user_group(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = { - "users": [ - { - "id": 0, - "username": "user 1", - "email": "email 1" - }, - { - "id": 1, - "username": "user 2", - "email": "email 2" - } - ], - "group_names": ["test_group", "test_group_2"]} - - query_statements = [] - query_statements.append(clear_tables_statement) - - for user in data["users"]: - query_statements.append(get_user_insert_statement(user["id"], user["username"], user["email"])) +# user_id_1 = 1 +# user_id_2 = 2 - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - # add valid group to user 0 - assert controller.add_user_group(data["users"][0]["id"], data["group_names"][0])["status"] == "Group created" - - # add group again to user 0 ... 
should be 1 group row still - assert controller.add_user_group(data["users"][0]["id"], data["group_names"][0])["status"] == "Group created" - - # add another valid group to user 0 - assert controller.add_user_group(data["users"][0]["id"], data["group_names"][1])["status"] == "Group created" - - # add same group to user 1 - assert controller.add_user_group(data["users"][1]["id"], data["group_names"][0])["status"] == "Group created" - - - # add with invalid user id - assert controller.add_user_group(130000, data["group_names"][0])["status"] == "Error: User id does not exist" - - # pass invalid tpyes - assert controller.add_user_group("130000", data["group_names"][0])["status"] == "Invalid input" - assert controller.add_user_group(data["users"][0]["id"], 133333)["status"] == "Invalid input" - - - # end result - # 3 groups in table - # 1 row for user 1 - # 2 rows for user 0 - - - with test_db_engine.connect() as connection: - - query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") - - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 3 - - query = s.text("""SELECT * FROM "augur_operations"."user_groups" WHERE "user_id"={};""".format(data["users"][0]["id"])) - - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 2 - - query = s.text("""SELECT * FROM "augur_operations"."user_groups" WHERE "user_id"={};""".format(data["users"][1]["id"])) - - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 1 - - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - - -def test_convert_group_name_to_id(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - user_id =1 - - groups = [ - { - "group_name": "test group 1", - "group_id": 1 - }, - { - "group_name": "test group 2", - "group_id": 2 - }, - { - "group_name": "test group 3", - "group_id": 3 - }, - ] - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_user_insert_statement(user_id)) - - for group in groups: - query_statements.append(get_user_group_insert_statement(user_id, group["group_name"], group["group_id"])) - - connection.execute("".join(query_statements)) - - with GithubTaskSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - for group in groups: - assert controller.convert_group_name_to_id(user_id, group["group_name"]) == group["group_id"] - - # test invalid group name - assert controller.convert_group_name_to_id(user_id, "hello") is None - # test invalid user id - assert controller.convert_group_name_to_id(user_id*2, groups[0]["group_name"]) is None +# groups = [ +# { +# "group_name": "test group 1", +# "group_id": 1 +# }, +# { +# "group_name": "test group 2", +# "group_id": 2 +# }, +# { +# "group_name": "test group 3", +# "group_id": 3 +# }, +# { +# "group_name": "test group 4", +# "group_id": 4 +# }, +# { +# "group_name": "test group 5", +# "group_id": 5 +# } +# ] + +# query_statements = [] +# query_statements.append(clear_tables_statement) +# query_statements.append(get_user_insert_statement(user_id_1)) + +# # add user with no user groups +# query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) + +# for group in groups: +# 
query_statements.append(get_user_group_insert_statement(user_id_1, group["group_name"], group["group_id"])) + +# connection.execute("".join(query_statements)) + +# with GithubTaskSession(logger, test_db_engine) as session: - # test invalid types - assert controller.convert_group_name_to_id(user_id, 5) is None - assert controller.convert_group_name_to_id("5", groups[0]["group_name"]) is None - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_remove_user_group(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - user_id =1 - repo_id = 1 - rg_id = 1 - - groups = [ - { - "group_name": "test group 1", - "group_id": 1 - }, - { - "group_name": "test group 2", - "group_id": 2 - }, - { - "group_name": "test group 3", - "group_id": 3 - }, - { - "group_name": "test group 4", - "group_id": 4 - }, - { - "group_name": "test group 5", - "group_id": 5 - } - ] - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_user_insert_statement(user_id)) - - for group in groups: - query_statements.append(get_user_group_insert_statement(user_id, group["group_name"], group["group_id"])) - - query_statements.append(get_repo_group_insert_statement(rg_id)) - query_statements.append(get_repo_insert_statement(repo_id, rg_id)) - query_statements.append(get_user_repo_insert_statement(repo_id, groups[0]["group_id"])) - - connection.execute("".join(query_statements)) - - with GithubTaskSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - - assert controller.remove_user_group(user_id, "hello")["status"] == "WARNING: Trying to delete group that does not exist" - - i = 0 - while(i < len(groups)-2): - assert controller.remove_user_group(user_id, groups[i]["group_name"])["status"] == "Group deleted" - i += 1 - - - with test_db_engine.connect() as connection: - - query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") - - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == len(groups)-i - - - while(i < len(groups)): - - assert controller.remove_user_group(user_id, groups[i]["group_name"])["status"] == "Group deleted" - i += 1 - - with test_db_engine.connect() as connection: - - query = s.text("""SELECT * FROM "augur_operations"."user_groups";""") - - result = connection.execute(query).fetchall() - assert result is not None - assert len(result) == 0 - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - - - -def test_get_user_groups(test_db_engine): - - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - user_id_1 = 1 - user_id_2 = 2 - groups = [ - { - "group_name": "test group 1", - "group_id": 1 - }, - { - "group_name": "test group 2", - "group_id": 2 - }, - { - "group_name": "test group 3", - "group_id": 3 - }, - { - "group_name": "test group 4", - "group_id": 4 - }, - { - "group_name": "test group 5", - "group_id": 5 - } - ] - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_user_insert_statement(user_id_1)) - - # add user with no user groups - 
query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) - - for group in groups: - query_statements.append(get_user_group_insert_statement(user_id_1, group["group_name"], group["group_id"])) - - connection.execute("".join(query_statements)) +# assert len(controller.get_user_groups(user_id_1)) == len(groups) - with GithubTaskSession(logger, test_db_engine) as session: +# assert len(controller.get_user_groups(user_id_2)) == 0 - controller = RepoLoadController(session) - assert len(controller.get_user_groups(user_id_1)) == len(groups) +# with test_db_engine.connect() as connection: - assert len(controller.get_user_groups(user_id_2)) == 0 +# user_group_delete_statement = get_user_group_delete_statement() +# query = s.text(user_group_delete_statement) - - with test_db_engine.connect() as connection: - - user_group_delete_statement = get_user_group_delete_statement() - query = s.text(user_group_delete_statement) - - result = connection.execute(query) +# result = connection.execute(query) - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) +# finally: +# with test_db_engine.connect() as connection: +# connection.execute(clear_tables_statement) -def test_get_user_group_repos(test_db_engine): +# def test_get_user_group_repos(test_db_engine): - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) +# clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] +# clear_tables_statement = get_repo_related_delete_statements(clear_tables) - try: - with test_db_engine.connect() as connection: +# try: +# with test_db_engine.connect() as connection: - user_id =1 - user_id_2 = 2 - group_id = 1 - group_id_2 = 2 - rg_id = 1 - group_name = "test_group 1" - repo_ids = [1, 2, 3, 4, 5] - repo_urls = ["url1", "url2", "url3", "url4", "url5"] +# user_id =1 +# user_id_2 = 2 +# group_id = 1 +# group_id_2 = 2 +# rg_id = 1 +# group_name = "test_group 1" +# repo_ids = [1, 2, 3, 4, 5] +# repo_urls = ["url1", "url2", "url3", "url4", "url5"] - query_statements = [] - query_statements.append(clear_tables_statement) +# query_statements = [] +# query_statements.append(clear_tables_statement) - # add user with a group that has multiple repos - query_statements.append(get_user_insert_statement(user_id)) - query_statements.append(get_user_group_insert_statement(user_id, group_name, group_id)) +# # add user with a group that has multiple repos +# query_statements.append(get_user_insert_statement(user_id)) +# query_statements.append(get_user_group_insert_statement(user_id, group_name, group_id)) - # add user with a group that has no repos - query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) - query_statements.append(get_user_group_insert_statement(user_id_2, group_name, group_id_2)) +# # add user with a group that has no repos +# query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) +# query_statements.append(get_user_group_insert_statement(user_id_2, group_name, group_id_2)) - query_statements.append(get_repo_group_insert_statement(rg_id)) +# query_statements.append(get_repo_group_insert_statement(rg_id)) - for i in range(0, len(repo_ids)): - query_statements.append(get_repo_insert_statement(repo_ids[i], rg_id, repo_urls[i])) - query_statements.append(get_user_repo_insert_statement(repo_ids[i], group_id)) +# for i 
in range(0, len(repo_ids)): +# query_statements.append(get_repo_insert_statement(repo_ids[i], rg_id, repo_urls[i])) +# query_statements.append(get_user_repo_insert_statement(repo_ids[i], group_id)) - connection.execute("".join(query_statements)) +# connection.execute("".join(query_statements)) - with GithubTaskSession(logger, test_db_engine) as session: +# with GithubTaskSession(logger, test_db_engine) as session: - controller = RepoLoadController(session) + - result = controller.get_user_group_repos(group_id) +# result = controller.get_user_group_repos(group_id) - assert len(result) == len(repo_ids) - assert set([repo.repo_id for repo in result]) == set(repo_ids) +# assert len(result) == len(repo_ids) +# assert set([repo.repo_id for repo in result]) == set(repo_ids) - result = controller.get_user_group_repos(group_id_2) +# result = controller.get_user_group_repos(group_id_2) - assert len(result) == 0 +# assert len(result) == 0 - with test_db_engine.connect() as connection: +# with test_db_engine.connect() as connection: - user_repo_delete_statement = get_user_repo_delete_statement() - query = s.text(user_repo_delete_statement) +# user_repo_delete_statement = get_user_repo_delete_statement() +# query = s.text(user_repo_delete_statement) - result = connection.execute(query) +# result = connection.execute(query) - assert len(controller.get_user_group_repos(group_id)) == 0 +# assert len(controller.get_user_group_repos(group_id)) == 0 - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) +# finally: +# with test_db_engine.connect() as connection: +# connection.execute(clear_tables_statement) -def test_get_user_group_repos(test_db_engine): +# def test_get_user_group_repos(test_db_engine): - clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) +# clear_tables = ["user_repos", "user_groups", "repo", "repo_groups", "users"] +# clear_tables_statement = get_repo_related_delete_statements(clear_tables) - try: - with test_db_engine.connect() as connection: +# try: +# with test_db_engine.connect() as connection: - user_id =1 - user_id_2 = 2 - group_id = 1 - group_id_2 = 2 - rg_id = 1 - group_name = "test_group 1" - repo_ids = [1, 2, 3, 4, 5] - repo_urls = ["url1", "url2", "url3", "url4", "url5"] +# user_id =1 +# user_id_2 = 2 +# group_id = 1 +# group_id_2 = 2 +# rg_id = 1 +# group_name = "test_group 1" +# repo_ids = [1, 2, 3, 4, 5] +# repo_urls = ["url1", "url2", "url3", "url4", "url5"] - query_statements = [] - query_statements.append(clear_tables_statement) +# query_statements = [] +# query_statements.append(clear_tables_statement) - # add user with a group that has multiple repos - query_statements.append(get_user_insert_statement(user_id)) - query_statements.append(get_user_group_insert_statement(user_id, group_name, group_id)) +# # add user with a group that has multiple repos +# query_statements.append(get_user_insert_statement(user_id)) +# query_statements.append(get_user_group_insert_statement(user_id, group_name, group_id)) - # add user with a group that has no repos - query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) +# # add user with a group that has no repos +# query_statements.append(get_user_insert_statement(user_id_2, username="hello", email="hello@gmail.com")) - query_statements.append(get_repo_group_insert_statement(rg_id)) +# 
query_statements.append(get_repo_group_insert_statement(rg_id)) - for i in range(0, len(repo_ids)): - query_statements.append(get_repo_insert_statement(repo_ids[i], rg_id, repo_urls[i])) - query_statements.append(get_user_repo_insert_statement(repo_ids[i], group_id)) +# for i in range(0, len(repo_ids)): +# query_statements.append(get_repo_insert_statement(repo_ids[i], rg_id, repo_urls[i])) +# query_statements.append(get_user_repo_insert_statement(repo_ids[i], group_id)) - connection.execute("".join(query_statements)) +# connection.execute("".join(query_statements)) - with GithubTaskSession(logger, test_db_engine) as session: +# with GithubTaskSession(logger, test_db_engine) as session: - controller = RepoLoadController(session) + - # test user with a group that has multiple repos - result = controller.get_user_repo_ids(user_id) +# # test user with a group that has multiple repos +# result = controller.get_user_repo_ids(user_id) - assert len(result) == len(repo_ids) - assert set(result) == set(repo_ids) +# assert len(result) == len(repo_ids) +# assert set(result) == set(repo_ids) - # test user without any groups or repos - result = controller.get_user_repo_ids(user_id_2) +# # test user without any groups or repos +# result = controller.get_user_repo_ids(user_id_2) - assert len(result) == 0 +# assert len(result) == 0 - query_statements.append(get_user_group_insert_statement(user_id_2, group_name, group_id_2)) +# query_statements.append(get_user_group_insert_statement(user_id_2, group_name, group_id_2)) - # test user with a group that doesn't have any repos - result = controller.get_user_repo_ids(user_id_2) +# # test user with a group that doesn't have any repos +# result = controller.get_user_repo_ids(user_id_2) - assert len(result) == 0 +# assert len(result) == 0 - with test_db_engine.connect() as connection: +# with test_db_engine.connect() as connection: - user_repo_delete_statement = get_user_repo_delete_statement() - query = s.text(user_repo_delete_statement) +# user_repo_delete_statement = get_user_repo_delete_statement() +# query = s.text(user_repo_delete_statement) - result = connection.execute(query) +# result = connection.execute(query) - assert len(controller.get_user_group_repos(group_id)) == 0 +# assert len(controller.get_user_group_repos(group_id)) == 0 - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) +# finally: +# with test_db_engine.connect() as connection: +# connection.execute(clear_tables_statement) diff --git a/tests/test_applicaton/test_repo_load_controller/test_repo_load_controller.py b/tests/test_applicaton/test_repo_load_controller/test_repo_load_controller.py deleted file mode 100644 index 0905a91fcb..0000000000 --- a/tests/test_applicaton/test_repo_load_controller/test_repo_load_controller.py +++ /dev/null @@ -1,521 +0,0 @@ -import logging -import pytest -import uuid -import sqlalchemy as s - - -from augur.util.repo_load_controller import RepoLoadController, ORG_REPOS_ENDPOINT, DEFAULT_REPO_GROUP_IDS, CLI_USER_ID - -from augur.tasks.github.util.github_task_session import GithubTaskSession -from augur.application.db.session import DatabaseSession -from augur.tasks.github.util.github_paginator import GithubPaginator -from augur.application.db.models import Contributor, Issue, Config -from augur.tasks.github.util.github_paginator import hit_api -from augur.application.db.util import execute_session_query - - -logger = logging.getLogger(__name__) - -VALID_ORG = {"org": "CDCgov", "repo_count": 236} - - -######## 
Helper Functions to Get Delete statements ################# - -def get_delete_statement(schema, table): - - return """DELETE FROM "{}"."{}";""".format(schema, table) - -def get_repo_delete_statement(): - - return get_delete_statement("augur_data", "repo") - -def get_repo_group_delete_statement(): - - return get_delete_statement("augur_data", "repo_groups") - -def get_user_delete_statement(): - - return get_delete_statement("augur_operations", "users") - -def get_user_repo_delete_statement(): - - return get_delete_statement("augur_operations", "user_repos") - -def get_config_delete_statement(): - - return get_delete_statement("augur_operations", "config") - -def get_repo_related_delete_statements(table_list): - """Takes a list of tables related to the RepoLoadController class and generates a delete statement. - - Args: - table_list: list of table names. Valid table names are - "user_repos" or "user_repo", "repo" or "repos", "repo_groups" or "repo_group:, "user" or "users", and "config" - - """ - - query_list = [] - if "user_repos" in table_list or "user_repo" in table_list: - query_list.append(get_user_repo_delete_statement()) - - if "repos" in table_list or "repo" in table_list: - query_list.append(get_repo_delete_statement()) - - if "repo_groups" in table_list or "repo_group" in table_list: - query_list.append(get_repo_group_delete_statement()) - - if "users" in table_list or "user" in table_list: - query_list.append(get_user_delete_statement()) - - if "config" in table_list: - query_list.append(get_config_delete_statement()) - - return " ".join(query_list) - -######## Helper Functions to add github api keys from prod db to test db ################# -def add_keys_to_test_db(test_db_engine): - - row = None - section_name = "Keys" - setting_name = "github_api_key" - with DatabaseSession(logger) as session: - query = session.query(Config).filter(Config.section_name==section_name, Config.setting_name==setting_name) - row = execute_session_query(query, 'one') - - with DatabaseSession(logger, test_db_engine) as test_session: - new_row = Config(section_name=section_name, setting_name=setting_name, value=row.value, type="str") - test_session.add(new_row) - test_session.commit() - - -######## Helper Functions to get insert statements ################# - -def get_repo_insert_statement(repo_id, rg_id, repo_url="place holder url", repo_status="New"): - - return """INSERT INTO "augur_data"."repo" ("repo_id", "repo_group_id", "repo_git", "repo_path", "repo_name", "repo_added", "repo_status", "repo_type", "url", "owner_id", "description", "primary_language", "created_at", "forked_from", "updated_at", "repo_archived_date_collected", "repo_archived", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, {}, '{}', NULL, NULL, '2022-08-15 21:08:07', '{}', '', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 'CLI', '1.0', 'Git', '2022-08-15 21:08:07');""".format(repo_id, rg_id, repo_url, repo_status) - -def get_repo_group_insert_statement(rg_id): - - return """INSERT INTO "augur_data"."repo_groups" ("repo_group_id", "rg_name", "rg_description", "rg_website", "rg_recache", "rg_last_modified", "rg_type", "tool_source", "tool_version", "data_source", "data_collection_date") VALUES ({}, 'Default Repo Group', 'The default repo group created by the schema generation script', '', 0, '2019-06-03 15:55:20', 'GitHub Organization', 'load', 'one', 'git', '2019-06-05 13:36:25');""".format(rg_id) - -def get_user_insert_statement(user_id): - - return """INSERT INTO 
"augur_operations"."users" ("user_id", "login_name", "login_hashword", "email", "first_name", "last_name", "admin") VALUES ({}, 'bil', 'pass', 'b@gmil.com', 'bill', 'bob', false);""".format(user_id) - - -######## Helper Functions to get retrieve data from tables ################# - -def get_repos(connection, where_string=None): - - query_list = [] - query_list.append('SELECT * FROM "augur_data"."repo"') - - if where_string: - if where_string.endswith(";"): - query_list.append(where_string[:-1]) - - query_list.append(where_string) - - query_list.append(";") - - query = s.text(" ".join(query_list)) - - return connection.execute(query).fetchall() - -def get_user_repos(connection): - - return connection.execute(s.text("""SELECT * FROM "augur_operations"."user_repos";""")).fetchall() - - -######## Helper Functions to get repos in an org ################# - -def get_org_repos(org_name, session): - - attempts = 0 - while attempts < 10: - result = hit_api(session.oauths, ORG_REPOS_ENDPOINT.format(org_name), logger) - - # if result is None try again - if not result: - attempts += 1 - continue - - response = result.json() - - if response: - return response - - return None - -def get_org_repo_count(org_name, session): - - repos = get_org_repos(org_name, session) - return len(repos) - - -def test_parse_repo_url(): - - with GithubTaskSession(logger) as session: - - controller = RepoLoadController(session) - - assert controller.parse_repo_url("asfsf") == (None, None) - assert controller.parse_repo_url("https://github.com/CDCgov/cdcgov.github.io") == ("CDCgov", "cdcgov.github.io") - assert controller.parse_repo_url("https://github.com/CDCgov/tn93.js") == ("CDCgov", "tn93.js") - - -def test_is_valid_repo(): - - with GithubTaskSession(logger) as session: - - controller = RepoLoadController(session) - - assert controller.is_valid_repo("hello world") is False - assert controller.is_valid_repo("https://github.com/chaoss/hello") is False - assert controller.is_valid_repo("https://github.com/hello124/augur") is False - assert controller.is_valid_repo("https://github.com//augur") is False - assert controller.is_valid_repo("https://github.com/chaoss/") is False - assert controller.is_valid_repo("https://github.com//") is False - assert controller.is_valid_repo("https://github.com/chaoss/augur") is True - assert controller.is_valid_repo("https://github.com/chaoss/augur/") is True - assert controller.is_valid_repo("https://github.com/chaoss/augur.git") is True - assert controller.is_valid_repo("https://github.com/chaoss/wg-value/") is True - - -def test_add_repo_row(test_db_engine): - - clear_tables = ["repo", "repo_groups"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - data = {"rg_id": 1, "repo_id": 1, "tool_source": "Frontend", - "repo_url": "https://github.com/chaoss/augur"} - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["rg_id"])) - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - assert RepoLoadController(session).add_repo_row(data["repo_url"], data["rg_id"], data["tool_source"]) is not None - - with test_db_engine.connect() as connection: - - result = get_repos(connection, where_string=f"WHERE repo_git='{data['repo_url']}'") - assert result is not None - assert len(result) > 0 - - finally: - with test_db_engine.connect() as connection: - 
connection.execute(clear_tables_statement) - - -def test_add_repo_row_with_updates(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - data = {"old_rg_id": 1, "new_rg_id": 2, "repo_id": 1, "repo_id_2": 2, "tool_source": "Test", - "repo_url": "https://github.com/chaoss/augur", "repo_url_2": "https://github.com/chaoss/grimoirelab-perceval-opnfv", "repo_status": "Complete"} - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["old_rg_id"])) - query_statements.append(get_repo_group_insert_statement(data["new_rg_id"])) - query_statements.append(get_repo_insert_statement(data["repo_id"], data["old_rg_id"], repo_url=data["repo_url"], repo_status=data["repo_status"])) - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - result = RepoLoadController(session).add_repo_row(data["repo_url"], data["new_rg_id"], data["tool_source"]) is not None - assert result == data["repo_id"] - - with test_db_engine.connect() as connection: - - result = get_repos(connection, where_string=f"WHERE repo_git='{data['repo_url']}'") - assert result is not None - assert len(result) == 1 - - value = dict(result[0]) - assert value["repo_status"] == data["repo_status"] - assert value["repo_group_id"] == data["new_rg_id"] - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_repo_to_user(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"repo_id": 1, "user_id": 2, "user_repo_group_id": 1} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["user_repo_group_id"])) - query_statements.append(get_repo_insert_statement(data["repo_id"], data["user_repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - query = s.text("".join(query_statements)) - - connection.execute(query) - - with DatabaseSession(logger, test_db_engine) as session: - - RepoLoadController(session).add_repo_to_user(data["repo_id"], data["user_id"]) - - with test_db_engine.connect() as connection: - - query = s.text("""SELECT * FROM "augur_operations"."user_repos" WHERE "user_id"=:user_id AND "repo_id"=:repo_id""") - - result = connection.execute(query, **data).fetchall() - assert result is not None - assert len(result) > 0 - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_frontend_repos_with_duplicates(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - url = "https://github.com/operate-first/operate-first-twitter" - - data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0]} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - - 
connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - controller = RepoLoadController(session) - controller.add_frontend_repo(url, data["user_id"]) - controller.add_frontend_repo(url, data["user_id"]) - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == 1 - assert dict(result[0])["repo_git"] == url - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_frontend_repos_with_invalid_repo(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - url = "https://github.com/chaoss/whitepaper" - - data = {"user_id": 2, "repo_group_id": 5} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - - connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - RepoLoadController(session).add_frontend_repo(url, data["user_id"]) - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == 0 - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_frontend_org_with_invalid_org(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - - data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "org_name": "chaosssss"} - - with test_db_engine.connect() as connection: - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - - connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - with GithubTaskSession(logger, test_db_engine) as session: - - url = f"https://github.com/{data['org_name']}/" - controller = RepoLoadController(session).add_frontend_org(url, data["user_id"]) - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == 0 - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_frontend_org_with_valid_org(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"user_id": 2, "repo_group_id": DEFAULT_REPO_GROUP_IDS[0], "org_name": VALID_ORG["org"]} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - url = 
"https://github.com/{}/".format(data["org_name"]) - RepoLoadController(session).add_frontend_org(url, data["user_id"]) - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == VALID_ORG["repo_count"] - - user_repo_result = get_user_repos(connection) - assert user_repo_result is not None - assert len(user_repo_result) == VALID_ORG["repo_count"] - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_cli_org_with_valid_org(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": VALID_ORG["org"]} - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - - connection.execute("".join(query_statements)) - - repo_count = None - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - RepoLoadController(session).add_cli_org(data["org_name"]) - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - assert result is not None - assert len(result) == VALID_ORG["repo_count"] - - user_repo_result = get_user_repos(connection) - assert user_repo_result is not None - assert len(user_repo_result) == VALID_ORG["repo_count"] - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - - -def test_add_cli_repos_with_duplicates(test_db_engine): - - clear_tables = ["user_repos", "repo", "repo_groups", "users", "config"] - clear_tables_statement = get_repo_related_delete_statements(clear_tables) - - try: - with test_db_engine.connect() as connection: - - data = {"user_id": CLI_USER_ID, "repo_group_id": 5, "org_name": "operate-first", "repo_name": "operate-first-twitter"} - url = f"https://github.com/{data['org_name']}/{data['repo_name']}" - - query_statements = [] - query_statements.append(clear_tables_statement) - query_statements.append(get_repo_group_insert_statement(data["repo_group_id"])) - query_statements.append(get_user_insert_statement(data["user_id"])) - - connection.execute("".join(query_statements)) - - add_keys_to_test_db(test_db_engine) - - with GithubTaskSession(logger, test_db_engine) as session: - - repo_data = {"url": url, "repo_group_id": data["repo_group_id"]} - - controller = RepoLoadController(session) - controller.add_cli_repo(repo_data) - controller.add_cli_repo(repo_data) - - with test_db_engine.connect() as connection: - - result = get_repos(connection) - - assert result is not None - assert len(result) == 1 - assert dict(result[0])["repo_git"] == url - - finally: - with test_db_engine.connect() as connection: - connection.execute(clear_tables_statement) - From 512a1d20f04bf1e3afd15f88b1c67723b805ba81 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Wed, 25 Jan 2023 11:58:51 -0600 Subject: [PATCH 027/134] Fixes to various frontend issues Signed-off-by: Andrew Brain --- augur/api/view/augur_view.py | 8 +++++++- augur/api/view/routes.py | 4 +++- augur/application/db/models/augur_operations.py | 8 ++++++-- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/augur/api/view/augur_view.py 
b/augur/api/view/augur_view.py index 6930f584c8..a4ede35e30 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -59,7 +59,13 @@ def unauthorized(): @login_manager.user_loader def load_user(user_id): - user = User.get_user(user_id) + + db_session = DatabaseSession(logger) + + user = User.get_user(db_session, user_id) + groups = user.groups + for group in groups: + repos = group.repos if not user: return None diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 0b66401612..0a8caa00cb 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -252,7 +252,9 @@ def repo_repo_view(id): if reports is None: return render_message("Report Definitions Missing", "You requested a report for a repo on this instance, but a definition for the report layout was not found.") - repo = Repo.get_by_id(id) + with DatabaseSession(logger) as db_session: + + repo = Repo.get_by_id(db_session, id) return render_module("repo-info", reports=reports.keys(), images=reports, title="Repo", repo=repo, repo_id=id) diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 75e5a59e6c..c11b5ac66a 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -411,7 +411,9 @@ def remove_group(self, group_name): def add_repo(self, group_name, repo_url): - with DatabaseSession(logger) as session: + from augur.tasks.github.util.github_task_session import GithubTaskSession + + with GithubTaskSession(logger) as session: result = UserRepo.add(session, repo_url, self.user_id, group_name) return result @@ -425,7 +427,9 @@ def remove_repo(self, session, group_name, repo_id): def add_org(self, group_name, org_url): - with DatabaseSession(logger) as session: + from augur.tasks.github.util.github_task_session import GithubTaskSession + + with GithubTaskSession(logger) as session: result = UserRepo.add_org_repos(session, org_url, self.user_id, group_name) return result From 20f2c0d346c6d915fcb4a76d64e5a3bd394510e8 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Wed, 25 Jan 2023 12:01:32 -0600 Subject: [PATCH 028/134] Fixes to various frontend issues (#2149) Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain --- augur/api/view/augur_view.py | 8 +++++++- augur/api/view/routes.py | 4 +++- augur/application/db/models/augur_operations.py | 8 ++++++-- 3 files changed, 16 insertions(+), 4 deletions(-) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index 6930f584c8..a4ede35e30 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -59,7 +59,13 @@ def unauthorized(): @login_manager.user_loader def load_user(user_id): - user = User.get_user(user_id) + + db_session = DatabaseSession(logger) + + user = User.get_user(db_session, user_id) + groups = user.groups + for group in groups: + repos = group.repos if not user: return None diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 0b66401612..0a8caa00cb 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -252,7 +252,9 @@ def repo_repo_view(id): if reports is None: return render_message("Report Definitions Missing", "You requested a report for a repo on this instance, but a definition for the report layout was not found.") - repo = Repo.get_by_id(id) + with DatabaseSession(logger) as db_session: + + repo = Repo.get_by_id(db_session, id) return render_module("repo-info", reports=reports.keys(), 
images=reports, title="Repo", repo=repo, repo_id=id) diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 75e5a59e6c..c11b5ac66a 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -411,7 +411,9 @@ def remove_group(self, group_name): def add_repo(self, group_name, repo_url): - with DatabaseSession(logger) as session: + from augur.tasks.github.util.github_task_session import GithubTaskSession + + with GithubTaskSession(logger) as session: result = UserRepo.add(session, repo_url, self.user_id, group_name) return result @@ -425,7 +427,9 @@ def remove_repo(self, session, group_name, repo_id): def add_org(self, group_name, org_url): - with DatabaseSession(logger) as session: + from augur.tasks.github.util.github_task_session import GithubTaskSession + + with GithubTaskSession(logger) as session: result = UserRepo.add_org_repos(session, org_url, self.user_id, group_name) return result From 7d71da0374cd1ca711183fecc8ed0bf2134eefd9 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Wed, 25 Jan 2023 12:12:25 -0600 Subject: [PATCH 029/134] Fix deleting repo Signed-off-by: Andrew Brain --- augur/api/view/api.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/augur/api/view/api.py b/augur/api/view/api.py index 721c8164ef..f31fbd1057 100644 --- a/augur/api/view/api.py +++ b/augur/api/view/api.py @@ -1,6 +1,7 @@ from flask import Flask, render_template, render_template_string, request, abort, jsonify, redirect, url_for, session, flash from flask_login import current_user, login_required from augur.application.db.models import Repo +from augur.application.db.session import DatabaseSession # from augur.util.repo_load_controller import parse_org_url, parse_repo_url from .utils import * @@ -88,8 +89,10 @@ def user_remove_repo(): repo = int(repo) + with DatabaseSession(logger) as session: + result = current_user.remove_repo(session, group, repo)[0] - if current_user.remove_repo(group, repo)[0]: + if result: flash(f"Successfully removed repo {repo} from group {group}") else: flash("An error occurred removing repo from group") From fcb1fcd868d55e45d562efd3e07551996987a420 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Wed, 25 Jan 2023 12:22:55 -0600 Subject: [PATCH 030/134] Temp fixes Signed-off-by: Andrew Brain --- augur/api/view/augur_view.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index a4ede35e30..113df8e080 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -64,8 +64,14 @@ def load_user(user_id): user = User.get_user(db_session, user_id) groups = user.groups + tokens = user.tokens + applications = user.applications + for application in applications: + sessions = application.sessions for group in groups: repos = group.repos + for token in tokens: + application = token.application if not user: return None From eca4aae9d4ba7237d4c4b18a486357c8ca7f3459 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Wed, 25 Jan 2023 12:44:54 -0600 Subject: [PATCH 031/134] facade changes and skeleton of scaling changes Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 336 +++++++++--------- .../facade04postanalysiscleanup.py | 4 +- .../facade_worker/facade05repofetch.py | 21 +- augur/tasks/github/events/tasks.py | 37 +- augur/tasks/github/facade_github/tasks.py | 182 +++++----- augur/tasks/github/issues/tasks.py | 33 +- 
augur/tasks/github/messages/tasks.py | 37 +- augur/tasks/github/pull_requests/tasks.py | 26 +- augur/tasks/start_tasks.py | 59 ++- 9 files changed, 392 insertions(+), 343 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index d9daf4571b..5c2b403fe3 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -70,18 +70,18 @@ def facade_analysis_init_facade_task(): session.log_activity('Info',f"Beginning analysis.") @celery.task -def grab_comitters(repo_id_list,platform="github"): +def grab_comitters(repo_id,platform="github"): logger = logging.getLogger(grab_comitters.__name__) - for repo_id in repo_id_list: - try: - grab_committer_list(GithubTaskSession(logger, engine), repo_id,platform) - except Exception as e: - logger.error(f"Could not grab committers from github endpoint!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + + try: + grab_committer_list(GithubTaskSession(logger, engine), repo_id,platform) + except Exception as e: + logger.error(f"Could not grab committers from github endpoint!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") @celery.task -def trim_commits_facade_task(repo_id_list): +def trim_commits_facade_task(repo_id): logger = logging.getLogger(trim_commits_facade_task.__name__) session = FacadeSession(logger) @@ -97,39 +97,38 @@ def update_analysis_log(repos_id,status): except: pass - for repo_id in repo_id_list: - session.inc_repos_processed() - update_analysis_log(repo_id,"Beginning analysis.") - # First we check to see if the previous analysis didn't complete + session.inc_repos_processed() + update_analysis_log(repo_id,"Beginning analysis.") + # First we check to see if the previous analysis didn't complete - get_status = s.sql.text("""SELECT working_commit FROM working_commits WHERE repos_id=:repo_id - """).bindparams(repo_id=repo_id) + get_status = s.sql.text("""SELECT working_commit FROM working_commits WHERE repos_id=:repo_id + """).bindparams(repo_id=repo_id) - try: - working_commits = session.fetchall_data_from_sql_text(get_status) - except: - working_commits = [] + try: + working_commits = session.fetchall_data_from_sql_text(get_status) + except: + working_commits = [] - # If there's a commit still there, the previous run was interrupted and - # the commit data may be incomplete. It should be trimmed, just in case. - for commit in working_commits: - trim_commit(session, repo_id,commit['working_commit']) + # If there's a commit still there, the previous run was interrupted and + # the commit data may be incomplete. It should be trimmed, just in case. + for commit in working_commits: + trim_commit(session, repo_id,commit['working_commit']) - # Remove the working commit. - remove_commit = s.sql.text("""DELETE FROM working_commits - WHERE repos_id = :repo_id AND - working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) - session.execute_sql(remove_commit) - session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") + # Remove the working commit. 
+ remove_commit = s.sql.text("""DELETE FROM working_commits + WHERE repos_id = :repo_id AND + working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) + session.execute_sql(remove_commit) + session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") - # Start the main analysis + # Start the main analysis - update_analysis_log(repo_id,'Collecting data') - logger.info(f"Got past repo {repo_id}") + update_analysis_log(repo_id,'Collecting data') + logger.info(f"Got past repo {repo_id}") @celery.task -def trim_commits_post_analysis_facade_task(repo_ids): +def trim_commits_post_analysis_facade_task(repo_id): logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) @@ -145,72 +144,71 @@ def update_analysis_log(repos_id,status): session.execute_sql(log_message) - for repo_id in repo_ids: - session.logger.info(f"Generating sequence for repo {repo_id}") + session.logger.info(f"Generating sequence for repo {repo_id}") - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - #Get the huge list of commits to process. - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - # Grab the parents of HEAD + #Get the huge list of commits to process. + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + # Grab the parents of HEAD - parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " - "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], - stdout=subprocess.PIPE, shell=True) + parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " + "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], + stdout=subprocess.PIPE, shell=True) - parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) + parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) - # If there are no commits in the range, we still get a blank entry in - # the set. Remove it, as it messes with the calculations + # If there are no commits in the range, we still get a blank entry in + # the set. 
Remove it, as it messes with the calculations - if '' in parent_commits: - parent_commits.remove('') + if '' in parent_commits: + parent_commits.remove('') - # Grab the existing commits from the database + # Grab the existing commits from the database - existing_commits = set() + existing_commits = set() - find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id - """).bindparams(repo_id=repo_id) + find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id + """).bindparams(repo_id=repo_id) - #session.cfg.cursor.execute(find_existing, (repo[0], )) + #session.cfg.cursor.execute(find_existing, (repo[0], )) - try: - for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): - existing_commits.add(commit['cmt_commit_hash']) - except: - session.log_activity('Info', 'list(cfg.cursor) returned an error') + try: + for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): + existing_commits.add(commit['cmt_commit_hash']) + except: + session.log_activity('Info', 'list(cfg.cursor) returned an error') - # Find missing commits and add them + # Find missing commits and add them - missing_commits = parent_commits - existing_commits + missing_commits = parent_commits - existing_commits - session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") - - # Find commits which are out of the analysis range + session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") + + # Find commits which are out of the analysis range - trimmed_commits = existing_commits - parent_commits + trimmed_commits = existing_commits - parent_commits - update_analysis_log(repo_id,'Data collection complete') + update_analysis_log(repo_id,'Data collection complete') - update_analysis_log(repo_id,'Beginning to trim commits') + update_analysis_log(repo_id,'Beginning to trim commits') - session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") + session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") + + + for commit in trimmed_commits: + trim_commit(session,repo_id,commit) - - for commit in trimmed_commits: - trim_commit(session,repo_id,commit) - - set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' - """).bindparams(repo_id=repo_id) + set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' + """).bindparams(repo_id=repo_id) - session.execute_sql(set_complete) + session.execute_sql(set_complete) - update_analysis_log(repo_id,'Commit trimming complete') + update_analysis_log(repo_id,'Commit trimming complete') - update_analysis_log(repo_id,'Complete') + update_analysis_log(repo_id,'Complete') @@ -231,7 +229,7 @@ def facade_start_contrib_analysis_task(): #enable celery multithreading @celery.task -def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: +def analyze_commits_in_parallel(repo_id, multithreaded: bool)-> None: """Take a large list of commit data to analyze and store in the database. Meant to be run in parallel with other instances of this task. 
""" @@ -241,82 +239,81 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: session = FacadeSession(logger) start_date = session.get_setting('start_date') - for repo_id in repo_ids: - session.logger.info(f"Generating sequence for repo {repo_id}") - - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') + session.logger.info(f"Generating sequence for repo {repo_id}") + + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - #Get the huge list of commits to process. - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - # Grab the parents of HEAD + #Get the huge list of commits to process. + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + # Grab the parents of HEAD - parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " - "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], - stdout=subprocess.PIPE, shell=True) + parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " + "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], + stdout=subprocess.PIPE, shell=True) - parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) + parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) - # If there are no commits in the range, we still get a blank entry in - # the set. Remove it, as it messes with the calculations + # If there are no commits in the range, we still get a blank entry in + # the set. Remove it, as it messes with the calculations - if '' in parent_commits: - parent_commits.remove('') + if '' in parent_commits: + parent_commits.remove('') - # Grab the existing commits from the database + # Grab the existing commits from the database - existing_commits = set() + existing_commits = set() - find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id - """).bindparams(repo_id=repo_id) + find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id + """).bindparams(repo_id=repo_id) - #session.cfg.cursor.execute(find_existing, (repo[0], )) + #session.cfg.cursor.execute(find_existing, (repo[0], )) - try: - for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): - existing_commits.add(commit['cmt_commit_hash']) - except: - session.log_activity('Info', 'list(cfg.cursor) returned an error') + try: + for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): + existing_commits.add(commit['cmt_commit_hash']) + except: + session.log_activity('Info', 'list(cfg.cursor) returned an error') - # Find missing commits and add them + # Find missing commits and add them - missing_commits = parent_commits - existing_commits + missing_commits = parent_commits - existing_commits - session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") - - queue = [] - if len(missing_commits) > 0: - #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) - - #encode the repo_id with the commit. 
- commits = [commit for commit in list(missing_commits)] - #Get all missing commits into one large list to split into task pools - queue.extend(commits) - else: - return - - logger.info(f"Got to analysis!") - - for count, commitTuple in enumerate(queue): - quarterQueue = int(len(queue) / 4) + session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") + + queue = [] + if len(missing_commits) > 0: + #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) + + #encode the repo_id with the commit. + commits = [commit for commit in list(missing_commits)] + #Get all missing commits into one large list to split into task pools + queue.extend(commits) + else: + return + + logger.info(f"Got to analysis!") + + for count, commitTuple in enumerate(queue): + quarterQueue = int(len(queue) / 4) - if quarterQueue == 0: - quarterQueue = 1 # prevent division by zero with integer math + if quarterQueue == 0: + quarterQueue = 1 # prevent division by zero with integer math - #Log progress when another quarter of the queue has been processed - if (count + 1) % quarterQueue == 0: - logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") + #Log progress when another quarter of the queue has been processed + if (count + 1) % quarterQueue == 0: + logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query,'one') + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query,'one') - logger.info(f"Got to analysis!") - - for count, commitTuple in enumerate(queue): + logger.info(f"Got to analysis!") + + for count, commitTuple in enumerate(queue): - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - analyze_commit(session, repo_id, repo_loc, commitTuple) + analyze_commit(session, repo_id, repo_loc, commitTuple) logger.info("Analysis complete") return @@ -350,49 +347,49 @@ def rebuild_unknown_affiliation_and_web_caches_facade_task(): rebuild_unknown_affiliation_and_web_caches(session) @celery.task -def force_repo_analysis_facade_task(repo_git_identifiers): +def force_repo_analysis_facade_task(repo_git): logger = logging.getLogger(force_repo_analysis_facade_task.__name__) with FacadeSession(logger) as session: - force_repo_analysis(session, repo_git_identifiers) + force_repo_analysis(session,repo_git) @celery.task -def git_repo_cleanup_facade_task(repo_git_identifiers): +def git_repo_cleanup_facade_task(repo_git): logger = logging.getLogger(git_repo_cleanup_facade_task.__name__) with FacadeSession(logger) as session: - git_repo_cleanup(session, repo_git_identifiers) + git_repo_cleanup(session, repo_git) @celery.task -def git_repo_initialize_facade_task(repo_git_identifiers): +def git_repo_initialize_facade_task(repo_git): logger = logging.getLogger(git_repo_initialize_facade_task.__name__) with FacadeSession(logger) as session: - git_repo_initialize(session, repo_git_identifiers) + git_repo_initialize(session, repo_git) @celery.task -def check_for_repo_updates_facade_task(repo_git_identifiers): +def check_for_repo_updates_facade_task(repo_git): logger = logging.getLogger(check_for_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: - check_for_repo_updates(session, repo_git_identifiers) + 
check_for_repo_updates(session, repo_git) @celery.task -def force_repo_updates_facade_task(repo_git_identifiers): +def force_repo_updates_facade_task(repo_git): logger = logging.getLogger(force_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: - force_repo_updates(session, repo_git_identifiers) + force_repo_updates(session, repo_git) @celery.task -def git_repo_updates_facade_task(repo_git_identifiers): +def git_repo_updates_facade_task(repo_git): logger = logging.getLogger(git_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: - git_repo_updates(session, repo_git_identifiers) + git_repo_updates(session, repo_git) -def generate_analysis_sequence(logger,repo_git_identifiers): +def generate_analysis_sequence(logger,repo_git): """Run the analysis by looping over all active repos. For each repo, we retrieve the list of commits which lead to HEAD. If any are missing from the database, they are filled in. Then we check to see if any commits in the database are @@ -409,26 +406,28 @@ def generate_analysis_sequence(logger,repo_git_identifiers): with FacadeSession(logger) as session: repo_list = s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo - WHERE repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + WHERE repo_git=:value""").bindparams(value=repo_git) repos = session.fetchall_data_from_sql_text(repo_list) start_date = session.get_setting('start_date') repo_ids = [repo['repo_id'] for repo in repos] + repo_id = repo_ids.pop(0) + #determine amount of celery tasks to run at once in each grouped task load concurrentTasks = int((-1 * (15/(len(repo_ids)+1))) + 15) logger.info(f"Scheduling concurrent layers {concurrentTasks} tasks at a time.") analysis_sequence.append(facade_analysis_init_facade_task.si()) - analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=grab_comitters,processes=concurrentTasks)) + analysis_sequence.append(grab_comitters.si(repo_id)) - analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=trim_commits_facade_task,processes=concurrentTasks)) + analysis_sequence.append(trim_commits_facade_task.si(repo_id)) - analysis_sequence.append(create_grouped_task_load(True,dataList=repo_ids,task=analyze_commits_in_parallel,processes=concurrentTasks)) + analysis_sequence.append(analyze_commits_in_parallel.si(repo_id,True)) - analysis_sequence.append(create_grouped_task_load(dataList=repo_ids,task=trim_commits_post_analysis_facade_task,processes=concurrentTasks)) + analysis_sequence.append(trim_commits_post_analysis_facade_task.si(repo_id)) analysis_sequence.append(facade_analysis_end_facade_task.si()) @@ -438,31 +437,34 @@ def generate_analysis_sequence(logger,repo_git_identifiers): -def generate_contributor_sequence(logger,repo_git_identifiers): +def generate_contributor_sequence(logger,repo_git): contributor_sequence = [] - all_repo_ids = [] + #all_repo_ids = [] + repo_id = None with FacadeSession(logger) as session: #contributor_sequence.append(facade_start_contrib_analysis_task.si()) query = s.sql.text("""SELECT repo_id FROM repo - WHERE repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + WHERE repo_git=:value""").bindparams(value=repo_git) - all_repos = session.fetchall_data_from_sql_text(query) + repo = execute_session_query(query,'one')#all_repos = session.fetchall_data_from_sql_text(query) + repo_id = repo.repo_id #pdb.set_trace() #breakpoint() #for repo in all_repos: # 
contributor_sequence.append(insert_facade_contributors.si(repo['repo_id'])) - all_repo_ids = [repo['repo_id'] for repo in all_repos] + #all_repo_ids = [repo['repo_id'] for repo in all_repos] - contrib_group = create_grouped_task_load(dataList=all_repo_ids,task=insert_facade_contributors)#group(contributor_sequence) - contrib_group.link_error(facade_error_handler.s()) - return contrib_group#chain(facade_start_contrib_analysis_task.si(), contrib_group) + #contrib_group = create_grouped_task_load(dataList=all_repo_ids,task=insert_facade_contributors)#group(contributor_sequence) + #contrib_group.link_error(facade_error_handler.s()) + #return contrib_group#chain(facade_start_contrib_analysis_task.si(), contrib_group) + return insert_facade_contributors.si(repo_id) -def generate_facade_chain(logger,repo_git_identifiers): +def generate_facade_chain(logger,repo_git): #raise NotImplemented logger.info("Generating facade sequence") @@ -491,28 +493,28 @@ def generate_facade_chain(logger,repo_git_identifiers): facade_sequence = [] if not limited_run or (limited_run and delete_marked_repos): - facade_sequence.append(git_repo_cleanup_facade_task.si(repo_git_identifiers))#git_repo_cleanup(session,repo_git_identifiers) + facade_sequence.append(git_repo_cleanup_facade_task.si(repo_git))#git_repo_cleanup(session,repo_git_identifiers) if not limited_run or (limited_run and clone_repos): - facade_sequence.append(git_repo_initialize_facade_task.si(repo_git_identifiers))#git_repo_initialize(session,repo_git_identifiers) + facade_sequence.append(git_repo_initialize_facade_task.si(repo_git))#git_repo_initialize(session,repo_git_identifiers) if not limited_run or (limited_run and check_updates): - facade_sequence.append(check_for_repo_updates_facade_task.si(repo_git_identifiers))#check_for_repo_updates(session,repo_git_identifiers) + facade_sequence.append(check_for_repo_updates_facade_task.si(repo_git))#check_for_repo_updates(session,repo_git_identifiers) if force_updates: - facade_sequence.append(force_repo_updates_facade_task.si(repo_git_identifiers)) + facade_sequence.append(force_repo_updates_facade_task.si(repo_git)) if not limited_run or (limited_run and pull_repos): - facade_sequence.append(git_repo_updates_facade_task.si(repo_git_identifiers)) + facade_sequence.append(git_repo_updates_facade_task.si(repo_git)) if force_analysis: - facade_sequence.append(force_repo_analysis_facade_task.si(repo_git_identifiers)) + facade_sequence.append(force_repo_analysis_facade_task.si(repo_git)) #Generate commit analysis task order. - facade_sequence.extend(generate_analysis_sequence(logger,repo_git_identifiers)) + facade_sequence.extend(generate_analysis_sequence(logger,repo_git)) #Generate contributor analysis task group. 
- facade_sequence.append(generate_contributor_sequence(logger,repo_git_identifiers)) + facade_sequence.append(generate_contributor_sequence(logger,repo_git)) if nuke_stored_affiliations: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py b/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py index 7c61ac4fdc..f1e4ea23eb 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade04postanalysiscleanup.py @@ -40,7 +40,7 @@ from augur.application.db.util import execute_session_query from augur.application.db.models import * -def git_repo_cleanup(session,repo_git_identifiers): +def git_repo_cleanup(session,repo_git): # Clean up any git repos that are pending deletion @@ -50,7 +50,7 @@ def git_repo_cleanup(session,repo_git_identifiers): query = session.query(Repo).filter( - Repo.repo_git.in_(repo_git_identifiers),Repo.repo_status == "Delete")#s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo WHERE repo_status='Delete'""") + Repo.repo_git == repo_git,Repo.repo_status == "Delete")#s.sql.text("""SELECT repo_id,repo_group_id,repo_path,repo_name FROM repo WHERE repo_status='Delete'""") delete_repos = execute_session_query(query,'all')#session.fetchall_data_from_sql_text(query) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index aa23a18ad7..ccd5bd8c20 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -41,7 +41,7 @@ from augur.application.db.models.augur_data import * from augur.application.db.util import execute_session_query, convert_orm_list_to_dict_list -def git_repo_initialize(session, repo_git_identifiers,repo_group_id=None): +def git_repo_initialize(session, repo_git,repo_group_id=None): # Select any new git repos so we can set up their locations and git clone # Select any new git repos so we can set up their locations and git clone @@ -51,7 +51,7 @@ def git_repo_initialize(session, repo_git_identifiers,repo_group_id=None): session.log_activity('Info','Fetching non-cloned repos') query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git FROM repo WHERE repo_status LIKE 'New%' - AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + AND repo_git=:value""").bindparams(value=repo_git) #Get data as a list of dicts @@ -62,7 +62,7 @@ def git_repo_initialize(session, repo_git_identifiers,repo_group_id=None): #query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git FROM repo WHERE repo_status LIKE 'New%'""") - query = session.query(Repo).filter('New' in Repo.repo_status) + query = session.query(Repo).filter('New' in Repo.repo_status, Repo.repo_git == repo_git) result = execute_session_query(query, 'all') for repo in result: @@ -73,6 +73,7 @@ def git_repo_initialize(session, repo_git_identifiers,repo_group_id=None): pass new_repos.append(repo_dict) + for row in new_repos: session.log_activity('Info',f"Fetching repos with repo group id: {row['repo_group_id']}") @@ -259,7 +260,7 @@ def check_for_repo_updates(session,repo_git_identifiers): session.log_activity('Info','Checking repos to update (complete)') -def 
force_repo_updates(session,repo_git_identifiers): +def force_repo_updates(session,repo_git): # Set the status of all non-new repos to "Update". @@ -268,28 +269,26 @@ def force_repo_updates(session,repo_git_identifiers): get_repo_ids = s.sql.text("""UPDATE repo SET repo_status='Update' WHERE repo_status NOT LIKE 'New%' AND repo_status!='Delete' AND repo_status !='Empty' - AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + AND repo_git=:value""").bindparams(value=repo_git) session.execute_sql(get_repo_ids) session.log_activity('Info','Forcing repos to update (complete)') -def force_repo_analysis(session,repo_git_identifiers): - -# Set the status of all non-new repos to "Analyze". +def force_repo_analysis(session,repo_git): session.update_status('Forcing all non-new repos to be analyzed') session.log_activity('Info','Forcing repos to be analyzed') set_to_analyze = s.sql.text("""UPDATE repo SET repo_status='Analyze' WHERE repo_status NOT LIKE 'New%' AND repo_status!='Delete' AND repo_status != 'Empty' - AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + AND repo_git=:repo_git_ident""").bindparams(repo_git_ident=repo_git) session.execute_sql(set_to_analyze) session.log_activity('Info','Forcing repos to be analyzed (complete)') -def git_repo_updates(session,repo_git_identifiers): +def git_repo_updates(session,repo_git): # Update existing repos @@ -299,7 +298,7 @@ def git_repo_updates(session,repo_git_identifiers): #query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git,repo_name,repo_path FROM repo WHERE # repo_status='Update'""") query = query = session.query(Repo).filter( - Repo.repo_git.in_(repo_git_identifiers),Repo.repo_status == 'Update') + Repo.repo_git == repo_git,Repo.repo_status == 'Update') result = execute_session_query(query, 'all') existing_repos = convert_orm_list_to_dict_list(result)#session.fetchall_data_from_sql_text(query)#list(cfg.cursor) diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 17e1efd917..ccf28f20d3 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -14,38 +14,35 @@ platform_id = 1 - @celery.task -def collect_events(repo_git_identifiers: [str]): +def collect_events(repo_git: str): logger = logging.getLogger(collect_events.__name__) with DatabaseSession(logger, engine) as session: - for repo_git in repo_git_identifiers: - - try: - - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_id = repo_obj.repo_id + try: + + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id - owner, repo = get_owner_repo(repo_git) + owner, repo = get_owner_repo(repo_git) - logger.info(f"Collecting Github events for {owner}/{repo}") + logger.info(f"Collecting Github events for {owner}/{repo}") - url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" + url = f"https://api.github.com/repos/{owner}/{repo}/issues/events" - event_data = retrieve_all_event_data(repo_git, logger) + event_data = retrieve_all_event_data(repo_git, logger) - if event_data: - - process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) + if event_data: + + process_events(event_data, f"{owner}/{repo}: Event task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no events") - except Exception as e: - logger.error(f"Could not collect events for {repo_git}\n Reason: {e} \n Traceback: 
{''.join(traceback.format_exception(None, e, e.__traceback__))}") + else: + logger.info(f"{owner}/{repo} has no events") + except Exception as e: + logger.error(f"Could not collect events for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_event_data(repo_git: str, logger): diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 74c2aa139c..896321c3c9 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -225,7 +225,7 @@ def link_commits_to_contributor(session,contributorQueue): # Update the contributors table from the data facade has gathered. @celery.task -def insert_facade_contributors(repo_id_list): +def insert_facade_contributors(repo_id): logger = logging.getLogger(insert_facade_contributors.__name__) with GithubTaskSession(logger, engine) as session: @@ -234,107 +234,103 @@ def insert_facade_contributors(repo_id_list): # Get all of the commit data's emails and names from the commit table that do not appear # in the contributors table or the contributors_aliases table. - for repo_id in repo_id_list: - - session.logger.info( - "Beginning process to insert contributors from facade commits for repo w entry info: {}\n".format(repo_id)) - new_contrib_sql = s.sql.text(""" - SELECT DISTINCT - commits.cmt_author_name AS NAME, - commits.cmt_commit_hash AS hash, - commits.cmt_author_raw_email AS email_raw, - 'not_unresolved' as resolution_status - FROM - commits - WHERE - commits.repo_id = :repo_id - AND (NOT EXISTS ( SELECT contributors.cntrb_canonical FROM contributors WHERE contributors.cntrb_canonical = commits.cmt_author_raw_email ) - or NOT EXISTS ( SELECT contributors_aliases.alias_email from contributors_aliases where contributors_aliases.alias_email = commits.cmt_author_raw_email) - AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name )) - GROUP BY - commits.cmt_author_name, - commits.cmt_commit_hash, - commits.cmt_author_raw_email - UNION - SELECT DISTINCT - commits.cmt_author_name AS NAME,--commits.cmt_id AS id, - commits.cmt_commit_hash AS hash, - commits.cmt_author_raw_email AS email_raw, - 'unresolved' as resolution_status - FROM - commits - WHERE - commits.repo_id = :repo_id - AND EXISTS ( SELECT unresolved_commit_emails.email FROM unresolved_commit_emails WHERE unresolved_commit_emails.email = commits.cmt_author_raw_email ) - AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name ) - GROUP BY - commits.cmt_author_name, - commits.cmt_commit_hash, - commits.cmt_author_raw_email - ORDER BY - hash - """).bindparams(repo_id=repo_id) - - #Execute statement with session. - result = session.execute_sql(new_contrib_sql).fetchall() - new_contribs = [dict(zip(row.keys(), row)) for row in result] - - #print(new_contribs) - - #json.loads(pd.read_sql(new_contrib_sql, self.db, params={ - # 'repo_id': repo_id}).to_json(orient="records")) - - - - process_commit_metadata(session,list(new_contribs),repo_id) - - session.logger.debug("DEBUG: Got through the new_contribs") - - - with FacadeSession(logger) as session: - - for repo_id in repo_id_list: - # sql query used to find corresponding cntrb_id's of emails found in the contributor's table - # i.e., if a contributor already exists, we use it! 
- resolve_email_to_cntrb_id_sql = s.sql.text(""" + session.logger.info( + "Beginning process to insert contributors from facade commits for repo w entry info: {}\n".format(repo_id)) + new_contrib_sql = s.sql.text(""" SELECT DISTINCT - cntrb_id, - contributors.cntrb_login AS login, - contributors.cntrb_canonical AS email, - commits.cmt_author_raw_email + commits.cmt_author_name AS NAME, + commits.cmt_commit_hash AS hash, + commits.cmt_author_raw_email AS email_raw, + 'not_unresolved' as resolution_status FROM - contributors, commits WHERE - contributors.cntrb_canonical = commits.cmt_author_raw_email - AND commits.repo_id = :repo_id + commits.repo_id = :repo_id + AND (NOT EXISTS ( SELECT contributors.cntrb_canonical FROM contributors WHERE contributors.cntrb_canonical = commits.cmt_author_raw_email ) + or NOT EXISTS ( SELECT contributors_aliases.alias_email from contributors_aliases where contributors_aliases.alias_email = commits.cmt_author_raw_email) + AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name )) + GROUP BY + commits.cmt_author_name, + commits.cmt_commit_hash, + commits.cmt_author_raw_email UNION SELECT DISTINCT - contributors_aliases.cntrb_id, - contributors.cntrb_login as login, - contributors_aliases.alias_email AS email, - commits.cmt_author_raw_email + commits.cmt_author_name AS NAME,--commits.cmt_id AS id, + commits.cmt_commit_hash AS hash, + commits.cmt_author_raw_email AS email_raw, + 'unresolved' as resolution_status FROM - contributors, - contributors_aliases, commits WHERE - contributors_aliases.alias_email = commits.cmt_author_raw_email - AND contributors.cntrb_id = contributors_aliases.cntrb_id - AND commits.repo_id = :repo_id - """).bindparams(repo_id=repo_id) - - #self.logger.info("DEBUG: got passed the sql statement declaration") - # Get a list of dicts that contain the emails and cntrb_id's of commits that appear in the contributor's table. - #existing_cntrb_emails = json.loads(pd.read_sql(resolve_email_to_cntrb_id_sql, self.db, params={ - # 'repo_id': repo_id}).to_json(orient="records")) - - result = session.execute_sql(resolve_email_to_cntrb_id_sql).fetchall() - existing_cntrb_emails = [dict(zip(row.keys(), row)) for row in result] - - print(existing_cntrb_emails) - link_commits_to_contributor(session,list(existing_cntrb_emails)) + commits.repo_id = :repo_id + AND EXISTS ( SELECT unresolved_commit_emails.email FROM unresolved_commit_emails WHERE unresolved_commit_emails.email = commits.cmt_author_raw_email ) + AND ( commits.cmt_author_name ) IN ( SELECT C.cmt_author_name FROM commits AS C WHERE C.repo_id = :repo_id GROUP BY C.cmt_author_name ) + GROUP BY + commits.cmt_author_name, + commits.cmt_commit_hash, + commits.cmt_author_raw_email + ORDER BY + hash + """).bindparams(repo_id=repo_id) + + #Execute statement with session. + result = session.execute_sql(new_contrib_sql).fetchall() + new_contribs = [dict(zip(row.keys(), row)) for row in result] + + #print(new_contribs) + + #json.loads(pd.read_sql(new_contrib_sql, self.db, params={ + # 'repo_id': repo_id}).to_json(orient="records")) + + + + process_commit_metadata(session,list(new_contribs),repo_id) + + session.logger.debug("DEBUG: Got through the new_contribs") - session.logger.info("Done with inserting and updating facade contributors") + + with FacadeSession(logger) as session: + # sql query used to find corresponding cntrb_id's of emails found in the contributor's table + # i.e., if a contributor already exists, we use it! 
+ resolve_email_to_cntrb_id_sql = s.sql.text(""" + SELECT DISTINCT + cntrb_id, + contributors.cntrb_login AS login, + contributors.cntrb_canonical AS email, + commits.cmt_author_raw_email + FROM + contributors, + commits + WHERE + contributors.cntrb_canonical = commits.cmt_author_raw_email + AND commits.repo_id = :repo_id + UNION + SELECT DISTINCT + contributors_aliases.cntrb_id, + contributors.cntrb_login as login, + contributors_aliases.alias_email AS email, + commits.cmt_author_raw_email + FROM + contributors, + contributors_aliases, + commits + WHERE + contributors_aliases.alias_email = commits.cmt_author_raw_email + AND contributors.cntrb_id = contributors_aliases.cntrb_id + AND commits.repo_id = :repo_id + """).bindparams(repo_id=repo_id) + + #self.logger.info("DEBUG: got passed the sql statement declaration") + # Get a list of dicts that contain the emails and cntrb_id's of commits that appear in the contributor's table. + #existing_cntrb_emails = json.loads(pd.read_sql(resolve_email_to_cntrb_id_sql, self.db, params={ + # 'repo_id': repo_id}).to_json(orient="records")) + + result = session.execute_sql(resolve_email_to_cntrb_id_sql).fetchall() + existing_cntrb_emails = [dict(zip(row.keys(), row)) for row in result] + + print(existing_cntrb_emails) + link_commits_to_contributor(session,list(existing_cntrb_emails)) + + session.logger.info("Done with inserting and updating facade contributors") return diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index 416ddfc22d..299dfa75a2 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -19,31 +19,30 @@ development = get_development_flag() @celery.task -def collect_issues(repo_git_identifiers: [str]) -> None: +def collect_issues(repo_git : str) -> None: logger = logging.getLogger(collect_issues.__name__) with DatabaseSession(logger, engine) as session: - for repo_git in repo_git_identifiers: - try: - - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_id = repo_obj.repo_id + try: + + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id - owner, repo = get_owner_repo(repo_git) - - issue_data = retrieve_all_issue_data(repo_git, logger) + owner, repo = get_owner_repo(repo_git) + + issue_data = retrieve_all_issue_data(repo_git, logger) - if issue_data: - - process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) + if issue_data: + + process_issues(issue_data, f"{owner}/{repo}: Issue task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no issues") - except Exception as e: - logger.error(f"Could not collect issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + else: + logger.info(f"{owner}/{repo} has no issues") + except Exception as e: + logger.error(f"Could not collect issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_issue_data(repo_git, logger) -> None: diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index 537d273984..b8120022bd 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -18,29 +18,28 @@ @celery.task -def collect_github_messages(repo_git_identifiers: [str]) -> None: +def collect_github_messages(repo_git: str) -> None: logger = 
logging.getLogger(collect_github_messages.__name__) with DatabaseSession(logger, engine) as session: - - for repo_git in repo_git_identifiers: - try: - - repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id - - owner, repo = get_owner_repo(repo_git) - message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) - - if message_data: - - process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) - - else: - logger.info(f"{owner}/{repo} has no messages") - except Exception as e: - logger.error(f"Could not collect github messages for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + + try: + + repo_id = session.query(Repo).filter( + Repo.repo_git == repo_git).one().repo_id + + owner, repo = get_owner_repo(repo_git) + message_data = retrieve_all_pr_and_issue_messages(repo_git, logger) + + if message_data: + + process_messages(message_data, f"{owner}/{repo}: Message task", repo_id, logger) + + else: + logger.info(f"{owner}/{repo} has no messages") + except Exception as e: + logger.error(f"Could not collect github messages for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 9ec691595b..8d9e362549 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -18,27 +18,27 @@ @celery.task -def collect_pull_requests(repo_git_identifiers: [str]) -> None: +def collect_pull_requests(repo_git: str) -> None: logger = logging.getLogger(collect_pull_requests.__name__) with DatabaseSession(logger, engine) as session: - for repo_git in repo_git_identifiers: - try: + + try: - repo_id = session.query(Repo).filter( - Repo.repo_git == repo_git).one().repo_id + repo_id = session.query(Repo).filter( + Repo.repo_git == repo_git).one().repo_id - owner, repo = get_owner_repo(repo_git) - pr_data = retrieve_all_pr_data(repo_git, logger) + owner, repo = get_owner_repo(repo_git) + pr_data = retrieve_all_pr_data(repo_git, logger) - if pr_data: - process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) - else: - logger.info(f"{owner}/{repo} has no pull requests") - except Exception as e: - logger.error(f"Could not collect pull requests for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + if pr_data: + process_pull_requests(pr_data, f"{owner}/{repo}: Pr task", repo_id, logger) + else: + logger.info(f"{owner}/{repo} has no pull requests") + except Exception as e: + logger.error(f"Could not collect pull requests for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") # TODO: Rename pull_request_reviewers table to pull_request_requested_reviewers diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 2dae9d8f8c..106d490d1a 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -32,6 +32,7 @@ from augur.tasks.init.celery_app import engine from augur.application.db.util import execute_session_query from logging import Logger +from augur.tasks.util.redis_list import RedisList CELERY_GROUP_TYPE = type(group()) CELERY_CHAIN_TYPE = type(chain()) @@ -201,7 +202,7 @@ def start_data_collection(self): augur_collection_chain = chain(*augur_collection_sequence) 
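These context lines collect the enabled phase signatures into a single Celery chain before handing the whole workflow to the workers. A minimal sketch of the primitives these patches lean on, using hypothetical task names rather than Augur's real tasks:

```python
# Illustrative only: hypothetical tasks showing how immutable signatures (.si),
# parallel groups, ordered chains, and an error callback fit together.
from celery import Celery, chain, group

app = Celery("sketch", broker="memory://", backend="cache+memory://")

@app.task
def clone_repo(repo_git):
    return repo_git

@app.task
def analyze_repo(repo_git):
    return repo_git

@app.task
def report_failure(request, exc, traceback):
    # error callbacks receive the failed task's request, exception, and traceback
    print(f"task {request.id} failed: {exc}")

def build_collection_chain(repo_gits):
    clone_step = group(clone_repo.si(git) for git in repo_gits)      # runs in parallel
    analyze_step = group(analyze_repo.si(git) for git in repo_gits)  # runs after clone_step
    collection_chain = chain(clone_step, analyze_step)
    collection_chain.link_error(report_failure.s())
    return collection_chain

# build_collection_chain(["https://github.com/org/repo"]).apply_async()
```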
augur_collection_chain.apply_async() - +""" @celery.task def start_task(): @@ -220,8 +221,64 @@ def start_task(): augur_collection = AugurTaskRoutine(collection_phases=enabled_phases) augur_collection.start_data_collection() +""" + +task_list_name = "augur_task_ids" + +#Wrap each task in a bind celery task to return its id +@celery.task() +def collection_task_wrapper(self,*args,**kwargs): + task = kwargs.pop('task') + + task(*args,**kwargs) + + return self.request.id + + +@celery.task +def task_success(successResult): + logger = logging.getLogger(successResult.__name__) + + # remove the task id from Redis + task_id_list = RedisList(task_list_name) + try: + task_id_list.remove(successResult) + except Exception as e: + logger.error(f"Could not remove id {successResult} from redis. Error: {e}") + + # set status to Finished in db + # set collection date in db +@celery.task +def task_failed(request,exc,traceback): + logger = logging.getLogger(task_failed.__name__) + + # remove the task id from Redis + task_id_list = RedisList(task_list_name) + try: + task_id_list.remove(successResult) + except Exception as e: + logger.error(f"Could not remove id {successResult} from redis. Error: {e}") + + # set status to Error in db + # log traceback to error file + + +@celery.task +def augur_collection_monitor(): + raise NotImplementedError + # calculate current active repos + # calcuate the number of repos we would like to add to the queue + + # get repos with these requirements + # haven't been collected or not collected in awhile + # don't have a status of Error or Collecting + + # loop through repos + # create chain + # start task + # set status in db to Collecting From 3d246060d0ac091143f457b95dbbec8a9e00a465 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Wed, 25 Jan 2023 14:19:05 -0600 Subject: [PATCH 032/134] Fix session passing Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/api/routes/user.py | 20 +++++++++----------- augur/api/view/routes.py | 6 ++++-- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 1d7c689166..4c133de451 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -139,22 +139,20 @@ def generate_session(application): if not username: return jsonify({"status": "Invalid authorization code"}) - user = User.get_user(username) - if not user: - return jsonify({"status": "Invalid user"}) + with DatabaseSession(logger) as session: - seconds_to_expire = 86400 + user = User.get_user(session, username) + if not user: + return jsonify({"status": "Invalid user"}) - with DatabaseSession(logger) as session: + seconds_to_expire = 86400 existing_session = session.query(UserSessionToken).filter(UserSessionToken.user_id == user.user_id, UserSessionToken.application_id == application.id).first() if existing_session: existing_session.delete_refresh_tokens(session) - - - user_session_token = UserSessionToken.create(user.user_id, application.id, seconds_to_expire).token - refresh_token = RefreshToken.create(user_session_token) + user_session_token = UserSessionToken.create(session, user.user_id, application.id, seconds_to_expire).token + refresh_token = RefreshToken.create(session, user_session_token) response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) response.headers["Cache-Control"] = "no-store" 
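The hunk above threads one explicit database session through `User.get_user`, `UserSessionToken.create`, and `RefreshToken.create` instead of letting each helper open its own. A minimal sketch of that pattern with hypothetical models (not Augur's), assuming SQLAlchemy 1.4 or later:

```python
# Hypothetical models: the point is that the caller owns the session and every
# lookup or factory method works inside it.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Account(Base):
    __tablename__ = "accounts"
    id = Column(Integer, primary_key=True)
    username = Column(String, unique=True)

    @classmethod
    def get_by_username(cls, session, username):
        return session.query(cls).filter_by(username=username).first()

class SessionToken(Base):
    __tablename__ = "session_tokens"
    id = Column(Integer, primary_key=True)
    account_id = Column(Integer)
    token = Column(String)

    @classmethod
    def create(cls, session, account_id, token):
        row = cls(account_id=account_id, token=token)
        session.add(row)
        session.commit()
        return row

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

with Session() as session:
    session.add(Account(username="demo"))
    session.commit()
    account = Account.get_by_username(session, "demo")
    token = SessionToken.create(session, account.id, "opaque-token-value")
    # objects created and fetched this way stay attached to the same session
```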
@@ -183,8 +181,8 @@ def refresh_session(application): user_session = refresh_token.user_session user = user_session.user - new_user_session = UserSessionToken.create(user.user_id, user_session.application.id) - new_refresh_token = RefreshToken.create(new_user_session.token) + new_user_session = UserSessionToken.create(session, user.user_id, user_session.application.id) + new_refresh_token = RefreshToken.create(session, new_user_session.token) session.delete(refresh_token) session.delete(user_session) diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 0a8caa00cb..78a461a1d6 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -216,9 +216,11 @@ def authorize_user(): if not client_id or response_type != "code": return render_message("Invalid Request", "Something went wrong. You may need to return to the previous application and make the request again.") + + with DatabaseSession(logger) as session: - # TODO get application from client id - client = ClientApplication.get_by_id(client_id) + # TODO get application from client id + client = ClientApplication.get_by_id(session, client_id) return render_module("authorization", app = client, state = state) From f660e7e594f6a09386a81bf388bce768bbeab636 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Wed, 25 Jan 2023 14:23:25 -0600 Subject: [PATCH 033/134] Reorganization of collection phases Signed-off-by: Isaac Milarsky --- augur/tasks/git/dependency_tasks/tasks.py | 6 +- augur/tasks/github/detect_move/tasks.py | 21 ++- augur/tasks/github/events/tasks.py | 1 - augur/tasks/github/issues/tasks.py | 3 +- augur/tasks/github/messages/tasks.py | 3 +- .../pull_requests/commits_model/tasks.py | 19 +- .../github/pull_requests/files_model/tasks.py | 20 +-- augur/tasks/github/pull_requests/tasks.py | 2 +- augur/tasks/github/repo_info/tasks.py | 23 ++- augur/tasks/start_tasks.py | 170 ++++++------------ 10 files changed, 103 insertions(+), 165 deletions(-) diff --git a/augur/tasks/git/dependency_tasks/tasks.py b/augur/tasks/git/dependency_tasks/tasks.py index 9dcb8b3463..611070f380 100644 --- a/augur/tasks/git/dependency_tasks/tasks.py +++ b/augur/tasks/git/dependency_tasks/tasks.py @@ -7,14 +7,12 @@ @celery.task -def process_dependency_metrics(repo_git_identifiers): +def process_dependency_metrics(repo_git): #raise NotImplementedError logger = logging.getLogger(process_dependency_metrics.__name__) - session = DatabaseSession(logger) - - for repo_git in repo_git_identifiers: + with DatabaseSession(logger) as session: query = session.query(Repo).filter(Repo.repo_git == repo_git) repo = execute_session_query(query,'one') diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py index 69c268a001..5f96b22b63 100644 --- a/augur/tasks/github/detect_move/tasks.py +++ b/augur/tasks/github/detect_move/tasks.py @@ -5,19 +5,18 @@ -@celery.task -def detect_github_repo_move(repo_git_identifiers : [str]) -> None: +@celery.task() +def detect_github_repo_move(repo_git : str) -> None: logger = logging.getLogger(detect_github_repo_move.__name__) - logger.info(f"Starting repo_move operation with {repo_git_identifiers}") + logger.info(f"Starting repo_move operation with {repo_git}") with GithubTaskSession(logger, engine) as session: #Ping each repo with the given repo_git to make sure #that they are still in place. 
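`detect_github_repo_move` pings each recorded URL to confirm the repository still lives where Augur thinks it does. One common way to notice a moved or renamed repository, shown here as a sketch rather than Augur's actual `ping_github_for_repo_move` logic, is to request the repo without following redirects and inspect the status code; GitHub answers a renamed repo with a 301 and the new location:

```python
# Sketch only: a helper that reports a repository's new API location, if any.
from typing import Optional
import requests

def repo_moved_to(owner: str, repo: str, token: Optional[str] = None) -> Optional[str]:
    headers = {"Authorization": f"token {token}"} if token else {}
    response = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}",
        headers=headers,
        allow_redirects=False,
        timeout=30,
    )
    if response.status_code == 301:
        # the Location header carries the repository's new canonical API URL
        return response.headers.get("Location")
    return None

# repo_moved_to("chaoss", "augur") returns None while the repo is where we expect it
```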
- for repo_git in repo_git_identifiers: - try: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - logger.info(f"Pinging repo: {repo_git}") - ping_github_for_repo_move(session, repo) - except Exception as e: - logger.error(f"Could not check repo source for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") \ No newline at end of file + try: + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + logger.info(f"Pinging repo: {repo_git}") + ping_github_for_repo_move(session, repo) + except Exception as e: + logger.error(f"Could not check repo source for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") \ No newline at end of file diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index ccf28f20d3..3b77bb7be5 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -14,7 +14,6 @@ platform_id = 1 -@celery.task def collect_events(repo_git: str): logger = logging.getLogger(collect_events.__name__) diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index 299dfa75a2..6553a1a9a2 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -18,7 +18,7 @@ from augur.application.db.util import execute_session_query development = get_development_flag() -@celery.task +@celery.task() def collect_issues(repo_git : str) -> None: logger = logging.getLogger(collect_issues.__name__) @@ -45,6 +45,7 @@ def collect_issues(repo_git : str) -> None: logger.error(f"Could not collect issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + def retrieve_all_issue_data(repo_git, logger) -> None: owner, repo = get_owner_repo(repo_git) diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index b8120022bd..9290da9236 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -17,7 +17,7 @@ platform_id = 1 -@celery.task +@celery.task() def collect_github_messages(repo_git: str) -> None: logger = logging.getLogger(collect_github_messages.__name__) @@ -42,6 +42,7 @@ def collect_github_messages(repo_git: str) -> None: logger.error(f"Could not collect github messages for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: owner, repo = get_owner_repo(repo_git) diff --git a/augur/tasks/github/pull_requests/commits_model/tasks.py b/augur/tasks/github/pull_requests/commits_model/tasks.py index 93e3eaba99..06e2e9e854 100644 --- a/augur/tasks/github/pull_requests/commits_model/tasks.py +++ b/augur/tasks/github/pull_requests/commits_model/tasks.py @@ -6,17 +6,16 @@ from augur.application.db.util import execute_session_query -@celery.task -def process_pull_request_commits(repo_git_identifiers: [str]) -> None: +@celery.task() +def process_pull_request_commits(repo_git: str) -> None: logger = logging.getLogger(process_pull_request_commits.__name__) with DatabaseSession(logger, engine) as session: - for repo_git in repo_git_identifiers: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - pull_request_commits_model(repo.repo_id, logger) - except Exception as e: - 
logger.error(f"Could not complete pull_request_commits_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - raise e + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + pull_request_commits_model(repo.repo_id, logger) + except Exception as e: + logger.error(f"Could not complete pull_request_commits_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + raise e diff --git a/augur/tasks/github/pull_requests/files_model/tasks.py b/augur/tasks/github/pull_requests/files_model/tasks.py index 813f71116c..554959d147 100644 --- a/augur/tasks/github/pull_requests/files_model/tasks.py +++ b/augur/tasks/github/pull_requests/files_model/tasks.py @@ -5,17 +5,15 @@ from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.util import execute_session_query -@celery.task -def process_pull_request_files(repo_git_identifiers: str) -> None: +@celery.task() +def process_pull_request_files(repo_git: str) -> None: logger = logging.getLogger(process_pull_request_files.__name__) with DatabaseSession(logger, engine) as session: - - for repo_git in repo_git_identifiers: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - pull_request_files_model(repo.repo_id, logger) - except Exception as e: - logger.error(f"Could not complete pull_request_files_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") - #raise e + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + pull_request_files_model(repo.repo_id, logger) + except Exception as e: + logger.error(f"Could not complete pull_request_files_model!\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + #raise e \ No newline at end of file diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 8d9e362549..9a8ecfb33b 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -17,7 +17,7 @@ platform_id = 1 -@celery.task +@celery.task() def collect_pull_requests(repo_git: str) -> None: logger = logging.getLogger(collect_pull_requests.__name__) diff --git a/augur/tasks/github/repo_info/tasks.py b/augur/tasks/github/repo_info/tasks.py index 37287542c7..aef3a0f57c 100644 --- a/augur/tasks/github/repo_info/tasks.py +++ b/augur/tasks/github/repo_info/tasks.py @@ -5,19 +5,18 @@ from augur.application.db.util import execute_session_query import traceback -@celery.task -def collect_repo_info(repo_git_identifiers: [str]): +@celery.task() +def collect_repo_info(repo_git: str): logger = logging.getLogger(collect_repo_info.__name__) with GithubTaskSession(logger, engine) as session: - - for repo_git in repo_git_identifiers: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo = execute_session_query(query, 'one') - try: - repo_info_model(session, repo) - except Exception as e: - session.logger.error(f"Could not add repo info for repo {repo.repo_id}\n Error: {e}") - session.logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo = execute_session_query(query, 'one') + try: + repo_info_model(session, repo) + except Exception as e: + session.logger.error(f"Could not add 
repo info for repo {repo.repo_id}\n Error: {e}") + session.logger.error( + ''.join(traceback.format_exception(None, e, e.__traceback__))) + diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 106d490d1a..2d079101cb 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -37,25 +37,48 @@ CELERY_GROUP_TYPE = type(group()) CELERY_CHAIN_TYPE = type(chain()) +""" +@celery.task(bind=True) +def collection_task_wrapper(self,*args,**kwargs): + task = kwargs.pop('task') + + task(*args,**kwargs) + + return self.request.id +""" + +@celery.task +def task_success(repo_git): + logger = logging.getLogger(successResult.__name__) + + # set status to Finished in db + # set collection date in db + +@celery.task +def task_failed(request,exc,traceback): + logger = logging.getLogger(task_failed.__name__) + + # set status to Error in db + # log traceback to error file + + #Predefine phases. For new phases edit this and the config to reflect. #The domain of tasks ran should be very explicit. -@celery.task -def prelim_phase(): +def prelim_phase(repo_git): logger = logging.getLogger(prelim_phase.__name__) - + job = None with DatabaseSession(logger) as session: - query = session.query(Repo) - repos = execute_session_query(query, 'all') - repo_git_list = [repo.repo_git for repo in repos] + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') - result = create_grouped_task_load(dataList=repo_git_list,task=detect_github_repo_move).apply_async() - - with allow_join_result(): - return result.get() + #TODO: if repo has moved mark it as pending. + job = detect_github_repo_move.si(repo_obj.repo_git) -@celery.task -def repo_collect_phase(): + return job + + +def repo_collect_phase(repo_git): logger = logging.getLogger(repo_collect_phase.__name__) #Here the term issues also includes prs. This list is a bunch of chains that run in parallel to process issue data. @@ -67,81 +90,32 @@ def repo_collect_phase(): #A chain is needed for each repo. with DatabaseSession(logger) as session: - query = session.query(Repo) - repos = execute_session_query(query, 'all') - + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_git = repo_obj.repo_git - all_repo_git_identifiers = [repo.repo_git for repo in repos] - #Cluster each repo in groups of 80. - np_clustered_array = np.array_split(all_repo_git_identifiers,math.ceil(len(all_repo_git_identifiers)/80)) + repo_info_task = collect_repo_info.si(repo_git)#collection_task_wrapper(self) - first_pass = np_clustered_array.pop(0).tolist() - - logger.info(f"Scheduling groups of {len(first_pass)}") - #Pool the tasks for collecting repo info. - repo_info_tasks = create_grouped_task_load(dataList=first_pass, task=collect_repo_info).tasks - - #pool the repo collection jobs that should be ran first and have deps. 
primary_repo_jobs = group( - *create_grouped_task_load(dataList=first_pass, task=collect_issues).tasks, - *create_grouped_task_load(dataList=first_pass, task=collect_pull_requests).tasks + collect_issues.si(repo_git), + collect_pull_requests.si(repo_git) ) secondary_repo_jobs = group( - *create_grouped_task_load(dataList=first_pass, task=collect_events).tasks, - *create_grouped_task_load(dataList=first_pass,task=collect_github_messages).tasks, - *create_grouped_task_load(dataList=first_pass, task=process_pull_request_files).tasks, - *create_grouped_task_load(dataList=first_pass, task=process_pull_request_commits).tasks + collect_events.si(repo_git),#*create_grouped_task_load(dataList=first_pass, task=collect_events).tasks, + collect_github_messages.si(repo_git),#*create_grouped_task_load(dataList=first_pass,task=collect_github_messages).tasks, + process_pull_request_files.si(repo_git),#*create_grouped_task_load(dataList=first_pass, task=process_pull_request_files).tasks, + process_pull_request_commits.si(repo_git)#*create_grouped_task_load(dataList=first_pass, task=process_pull_request_commits).tasks ) - repo_task_group = group( - *repo_info_tasks, + repo_info_task, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), - chain(generate_facade_chain(logger,first_pass),create_grouped_task_load(dataList=first_pass,task=process_dependency_metrics)), + chain(generate_facade_chain(logger,repo_git),process_dependency_metrics.si(repo_git)), collect_releases.si() ) - - result = chain(repo_task_group, refresh_materialized_views.si()).apply_async() - - with allow_join_result(): - result.wait() - - if len(np_clustered_array) == 0: - return - - - for cluster in np_clustered_array: - additionalPass = cluster.tolist() - #Pool the tasks for collecting repo info. - repo_info_tasks = create_grouped_task_load(dataList=additionalPass, task=collect_repo_info).tasks - - #pool the repo collection jobs that should be ran first and have deps. 
- primary_repo_jobs = group( - *create_grouped_task_load(dataList=additionalPass, task=collect_issues).tasks, - *create_grouped_task_load(dataList=additionalPass, task=collect_pull_requests).tasks - ) - - secondary_repo_jobs = group( - *create_grouped_task_load(dataList=additionalPass, task=collect_events).tasks, - *create_grouped_task_load(dataList=additionalPass,task=collect_github_messages).tasks, - *create_grouped_task_load(dataList=additionalPass, task=process_pull_request_files).tasks, - *create_grouped_task_load(dataList=additionalPass, task=process_pull_request_commits).tasks - ) - - repo_task_group = group( - *repo_info_tasks, - chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), - generate_facade_chain(logger,additionalPass), - *create_grouped_task_load(dataList=additionalPass,task=process_dependency_metrics).tasks - ) - - result = chain(repo_task_group, refresh_materialized_views.si()).apply_async() - - with allow_join_result(): - result.wait() - return + return repo_task_group DEFINED_COLLECTION_PHASES = [prelim_phase, repo_collect_phase] @@ -223,51 +197,21 @@ def start_task(): augur_collection.start_data_collection() """ -task_list_name = "augur_task_ids" - -#Wrap each task in a bind celery task to return its id -@celery.task() -def collection_task_wrapper(self,*args,**kwargs): - task = kwargs.pop('task') - - task(*args,**kwargs) - - return self.request.id @celery.task -def task_success(successResult): - logger = logging.getLogger(successResult.__name__) - - # remove the task id from Redis - task_id_list = RedisList(task_list_name) - try: - task_id_list.remove(successResult) - except Exception as e: - logger.error(f"Could not remove id {successResult} from redis. Error: {e}") - - # set status to Finished in db - # set collection date in db - +def augur_collection_monitor(): + logger = logging.getLogger(augur_collection_monitor.__name__) -@celery.task -def task_failed(request,exc,traceback): - logger = logging.getLogger(task_failed.__name__) + #Get phase options from the config + with DatabaseSession(logger, engine) as session: + config = AugurConfig(logger, session) + phase_options = config.get_section("Task_Routine") - # remove the task id from Redis - task_id_list = RedisList(task_list_name) - try: - task_id_list.remove(successResult) - except Exception as e: - logger.error(f"Could not remove id {successResult} from redis. 
Error: {e}") + #Get list of enabled phases + enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] + enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] - # set status to Error in db - # log traceback to error file - - -@celery.task -def augur_collection_monitor(): - raise NotImplementedError # calculate current active repos # calcuate the number of repos we would like to add to the queue From 75755e71fb2e204bd5f72c0829bb76fbcf1fb723 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Wed, 25 Jan 2023 14:30:18 -0600 Subject: [PATCH 034/134] Possible fix Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/api/routes/user.py | 3 +++ augur/api/view/augur_view.py | 6 +----- augur/application/db/models/augur_operations.py | 3 --- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 4c133de451..591cdd2d79 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -151,6 +151,9 @@ def generate_session(application): if existing_session: existing_session.delete_refresh_tokens(session) + session.delete(existing_session) + session.commit() + user_session_token = UserSessionToken.create(session, user.user_id, application.id, seconds_to_expire).token refresh_token = RefreshToken.create(session, user_session_token) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index 948f37aba0..3afa315a11 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -64,7 +64,6 @@ def load_user(user_id): user = User.get_user(db_session, user_id) groups = user.groups -<<<<<<< 8knot_fixes tokens = user.tokens applications = user.applications for application in applications: @@ -73,10 +72,7 @@ def load_user(user_id): repos = group.repos for token in tokens: application = token.application -======= - for group in groups: - repos = group.repos ->>>>>>> dev + db_session.expunge(user) if not user: return None diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index c11b5ac66a..3f3b8566f7 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -839,9 +839,6 @@ def delete_refresh_tokens(self, session): session.delete(token) session.commit() - session.delete(self) - session.commit() - class ClientApplication(Base): __tablename__ = "client_applications" __table_args__ = ( From 51a6b694dcffd96c3ae9d2e895213b1a35da5c16 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Wed, 25 Jan 2023 15:14:47 -0600 Subject: [PATCH 035/134] Fix dumb error Signed-off-by: Andrew Brain --- augur/api/routes/user.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 591cdd2d79..f3707238bd 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -157,8 +157,8 @@ def generate_session(application): user_session_token = UserSessionToken.create(session, user.user_id, application.id, seconds_to_expire).token refresh_token = RefreshToken.create(session, user_session_token) - response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) - response.headers["Cache-Control"] = "no-store" + response = jsonify({"status": "Validated", "username": 
username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) + response.headers["Cache-Control"] = "no-store" return response From f46f4bee61aa9c1fb1ec43767801ac0612241e5d Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Wed, 25 Jan 2023 15:25:14 -0600 Subject: [PATCH 036/134] Fix session out of scope issue Signed-off-by: Andrew Brain --- augur/api/routes/user.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index f3707238bd..3362d61211 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -173,25 +173,26 @@ def refresh_session(application): if request.args.get("grant_type") != "refresh_token": return jsonify({"status": "Invalid grant type"}) - session = Session() - refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() - if not refresh_token: - return jsonify({"status": "Invalid refresh token"}) + with DatabaseSession(logger) as session: - if refresh_token.user_session.application == application: - return jsonify({"status": "Applications do not match"}) + refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() + if not refresh_token: + return jsonify({"status": "Invalid refresh token"}) - user_session = refresh_token.user_session - user = user_session.user + if refresh_token.user_session.application == application: + return jsonify({"status": "Applications do not match"}) - new_user_session = UserSessionToken.create(session, user.user_id, user_session.application.id) - new_refresh_token = RefreshToken.create(session, new_user_session.token) - - session.delete(refresh_token) - session.delete(user_session) - session.commit() + user_session = refresh_token.user_session + user = user_session.user + + new_user_session_token = UserSessionToken.create(session, user.user_id, user_session.application.id).token + new_refresh_token_id = RefreshToken.create(session, new_user_session_token).id + + session.delete(refresh_token) + session.delete(user_session) + session.commit() - return jsonify({"status": "Validated", "refresh_token": new_refresh_token.id, "access_token": new_user_session.token, "expires": 86400}) + return jsonify({"status": "Validated", "refresh_token": new_refresh_token_id, "access_token": new_user_session_token, "expires": 86400}) @server.app.route(f"/{AUGUR_API_VERSION}/user/query", methods=['POST']) From 5fe4e19b8aad410fac097dc419c26593de48d937 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Thu, 26 Jan 2023 08:42:56 -0600 Subject: [PATCH 037/134] Add collection status table Signed-off-by: Andrew Brain --- .../application/db/models/augur_operations.py | 14 +++++++ .../versions/5_add_collection_status_table.py | 37 +++++++++++++++++++ 2 files changed, 51 insertions(+) create mode 100644 augur/application/schema/alembic/versions/5_add_collection_status_table.py diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 75e5a59e6c..a7247bf951 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -914,3 +914,17 @@ def create(session, user_session_token_id): return refresh_token + +class CollectionStatus(Base): + __tablename__ = "collection_status" + __table_args__ = ( + {"schema": "augur_operations"} + ) + + repo_id = Column(ForeignKey("augur_data.repo.repo_id", 
name="collection_status_repo_id_fk"), primary_key=True) + data_last_collected = Column(TIMESTAMP) + event_last_collected = Column(TIMESTAMP) + status = Column(String, nullable=False, server_default=text("'Pending'")) + task_id = Column(String) + + repo = relationship("Repo") diff --git a/augur/application/schema/alembic/versions/5_add_collection_status_table.py b/augur/application/schema/alembic/versions/5_add_collection_status_table.py new file mode 100644 index 0000000000..4db2e321a6 --- /dev/null +++ b/augur/application/schema/alembic/versions/5_add_collection_status_table.py @@ -0,0 +1,37 @@ +"""Add collection status table + +Revision ID: 5 +Revises: 4 +Create Date: 2023-01-26 08:30:05.524959 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = '5' +down_revision = '4' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('collection_status', + sa.Column('repo_id', sa.BigInteger(), nullable=False), + sa.Column('data_last_collected', postgresql.TIMESTAMP(), nullable=True), + sa.Column('event_last_collected', postgresql.TIMESTAMP(), nullable=True), + sa.Column('status', sa.String(), server_default=sa.text("'Pending'"), nullable=False), + sa.Column('task_id', sa.String(), nullable=True), + sa.ForeignKeyConstraint(['repo_id'], ['augur_data.repo.repo_id'], name='collection_status_repo_id_fk'), + sa.PrimaryKeyConstraint('repo_id'), + schema='augur_operations' + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('collection_status', schema='augur_operations') + # ### end Alembic commands ### From 938c706325422a437ef907f5c317da0faa9cb319 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 26 Jan 2023 08:50:46 -0600 Subject: [PATCH 038/134] Fix server engine (#2152) Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain --- augur/api/server.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/augur/api/server.py b/augur/api/server.py index 576ce1bdda..2f13eeab1a 100644 --- a/augur/api/server.py +++ b/augur/api/server.py @@ -49,7 +49,9 @@ def __init__(self): """Initialize the Server class.""" self.logger = AugurLogger("server").get_logger() - + self.session = DatabaseSession(self.logger) + self.config = AugurConfig(self.logger, self.session) + self.engine = self.session.engine self.cache_manager = self.create_cache_manager() self.server_cache = self.get_server_cache() @@ -435,12 +437,9 @@ def get_server_cache(self) -> Cache: server cache """ - with DatabaseSession(self.logger) as session: - config = AugurConfig(self.logger, session) - - expire = int(config.get_value('Server', 'cache_expire')) - server_cache = self.cache_manager.get_cache('server', expire=expire) - server_cache.clear() + expire = int(self.config.get_value('Server', 'cache_expire')) + server_cache = self.cache_manager.get_cache('server', expire=expire) + server_cache.clear() return server_cache From bb46575d9c7d913e7c4cc5fddf9280f20c87fd09 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 26 Jan 2023 09:05:21 -0600 Subject: [PATCH 039/134] More 8knot fixes (#2151) * Fixes to various frontend issues Signed-off-by: Andrew Brain * Fix deleting repo Signed-off-by: Andrew Brain * Temp fixes Signed-off-by: Andrew Brain * Fix session 
passing Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Possible fix Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Fix dumb error Signed-off-by: Andrew Brain * Fix session out of scope issue Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/api/routes/user.py | 52 ++++++++++--------- augur/api/view/api.py | 5 +- augur/api/view/augur_view.py | 7 +++ augur/api/view/routes.py | 6 ++- .../application/db/models/augur_operations.py | 3 -- 5 files changed, 42 insertions(+), 31 deletions(-) diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 1d7c689166..3362d61211 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -139,25 +139,26 @@ def generate_session(application): if not username: return jsonify({"status": "Invalid authorization code"}) - user = User.get_user(username) - if not user: - return jsonify({"status": "Invalid user"}) + with DatabaseSession(logger) as session: - seconds_to_expire = 86400 + user = User.get_user(session, username) + if not user: + return jsonify({"status": "Invalid user"}) - with DatabaseSession(logger) as session: + seconds_to_expire = 86400 existing_session = session.query(UserSessionToken).filter(UserSessionToken.user_id == user.user_id, UserSessionToken.application_id == application.id).first() if existing_session: existing_session.delete_refresh_tokens(session) - + session.delete(existing_session) + session.commit() - user_session_token = UserSessionToken.create(user.user_id, application.id, seconds_to_expire).token - refresh_token = RefreshToken.create(user_session_token) + user_session_token = UserSessionToken.create(session, user.user_id, application.id, seconds_to_expire).token + refresh_token = RefreshToken.create(session, user_session_token) - response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) - response.headers["Cache-Control"] = "no-store" + response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) + response.headers["Cache-Control"] = "no-store" return response @@ -172,25 +173,26 @@ def refresh_session(application): if request.args.get("grant_type") != "refresh_token": return jsonify({"status": "Invalid grant type"}) - session = Session() - refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() - if not refresh_token: - return jsonify({"status": "Invalid refresh token"}) + with DatabaseSession(logger) as session: - if refresh_token.user_session.application == application: - return jsonify({"status": "Applications do not match"}) + refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() + if not refresh_token: + return jsonify({"status": "Invalid refresh token"}) - user_session = refresh_token.user_session - user = user_session.user + if refresh_token.user_session.application == application: + return jsonify({"status": "Applications do not match"}) - new_user_session = UserSessionToken.create(user.user_id, user_session.application.id) - new_refresh_token = RefreshToken.create(new_user_session.token) - - session.delete(refresh_token) - session.delete(user_session) - session.commit() + user_session = 
refresh_token.user_session + user = user_session.user + + new_user_session_token = UserSessionToken.create(session, user.user_id, user_session.application.id).token + new_refresh_token_id = RefreshToken.create(session, new_user_session_token).id + + session.delete(refresh_token) + session.delete(user_session) + session.commit() - return jsonify({"status": "Validated", "refresh_token": new_refresh_token.id, "access_token": new_user_session.token, "expires": 86400}) + return jsonify({"status": "Validated", "refresh_token": new_refresh_token_id, "access_token": new_user_session_token, "expires": 86400}) @server.app.route(f"/{AUGUR_API_VERSION}/user/query", methods=['POST']) diff --git a/augur/api/view/api.py b/augur/api/view/api.py index 721c8164ef..f31fbd1057 100644 --- a/augur/api/view/api.py +++ b/augur/api/view/api.py @@ -1,6 +1,7 @@ from flask import Flask, render_template, render_template_string, request, abort, jsonify, redirect, url_for, session, flash from flask_login import current_user, login_required from augur.application.db.models import Repo +from augur.application.db.session import DatabaseSession # from augur.util.repo_load_controller import parse_org_url, parse_repo_url from .utils import * @@ -88,8 +89,10 @@ def user_remove_repo(): repo = int(repo) + with DatabaseSession(logger) as session: + result = current_user.remove_repo(session, group, repo)[0] - if current_user.remove_repo(group, repo)[0]: + if result: flash(f"Successfully removed repo {repo} from group {group}") else: flash("An error occurred removing repo from group") diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index a4ede35e30..3afa315a11 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -64,8 +64,15 @@ def load_user(user_id): user = User.get_user(db_session, user_id) groups = user.groups + tokens = user.tokens + applications = user.applications + for application in applications: + sessions = application.sessions for group in groups: repos = group.repos + for token in tokens: + application = token.application + db_session.expunge(user) if not user: return None diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 0a8caa00cb..78a461a1d6 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -216,9 +216,11 @@ def authorize_user(): if not client_id or response_type != "code": return render_message("Invalid Request", "Something went wrong. 
You may need to return to the previous application and make the request again.") + + with DatabaseSession(logger) as session: - # TODO get application from client id - client = ClientApplication.get_by_id(client_id) + # TODO get application from client id + client = ClientApplication.get_by_id(session, client_id) return render_module("authorization", app = client, state = state) diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index c11b5ac66a..3f3b8566f7 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -839,9 +839,6 @@ def delete_refresh_tokens(self, session): session.delete(token) session.commit() - session.delete(self) - session.commit() - class ClientApplication(Base): __tablename__ = "client_applications" __table_args__ = ( From d6a18ade65f52d64ef481fc43171dfd56a00c1c8 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Thu, 26 Jan 2023 09:17:21 -0600 Subject: [PATCH 040/134] Implement task success handler for repo collection Signed-off-by: Andrew Brain --- augur/application/db/models/__init__.py | 3 ++- augur/application/db/models/augur_data.py | 6 ++++++ augur/tasks/start_tasks.py | 20 +++++++++++++++----- 3 files changed, 23 insertions(+), 6 deletions(-) diff --git a/augur/application/db/models/__init__.py b/augur/application/db/models/__init__.py index 9721a8f4bb..3941db76c0 100644 --- a/augur/application/db/models/__init__.py +++ b/augur/application/db/models/__init__.py @@ -105,7 +105,8 @@ ClientApplication, Subscription, SubscriptionType, - RefreshToken + RefreshToken, + CollectionStatus ) DEFAULT_REPO_GROUP_IDS = [1, 10] diff --git a/augur/application/db/models/augur_data.py b/augur/application/db/models/augur_data.py index 12bc510e5a..95bb401920 100644 --- a/augur/application/db/models/augur_data.py +++ b/augur/application/db/models/augur_data.py @@ -845,12 +845,18 @@ class Repo(Base): repo_group = relationship("RepoGroup") user_repo = relationship("UserRepo") + collection_status = relationship("CollectionStatus") @staticmethod def get_by_id(session, repo_id): return session.query(Repo).filter(Repo.repo_id == repo_id).first() + @staticmethod + def get_by_repo_git(session, repo_git): + + return session.query(Repo).filter(Repo.repo_git == repo_git).first() + @staticmethod def is_valid_github_repo(session, url: str) -> bool: """Determine whether repo url is valid. 
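
# Illustrative sketch, separate from the patch above: Repo.collection_status is
# declared without uselist=False, so SQLAlchemy loads it as a *collection*, not
# a single row -- which is why a later patch in this series reads it as
# repo.collection_status[0]. Trimmed-down stand-in models on in-memory SQLite:
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, relationship, sessionmaker

Base = declarative_base()

class Repo(Base):
    __tablename__ = "repo"
    repo_id = Column(Integer, primary_key=True)
    repo_git = Column(String)
    collection_status = relationship("CollectionStatus")  # one-to-many -> list

class CollectionStatus(Base):
    __tablename__ = "collection_status"
    repo_id = Column(Integer, ForeignKey("repo.repo_id"), primary_key=True)
    status = Column(String, default="Pending")

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

url = "https://github.com/example/repo"
session.add(Repo(repo_id=1, repo_git=url, collection_status=[CollectionStatus()]))
session.commit()

repo = session.query(Repo).filter(Repo.repo_git == url).first()
print(type(repo.collection_status))      # a list-like collection, not a row
print(repo.collection_status[0].status)  # -> "Pending"
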
diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 2d079101cb..77120918a7 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -29,10 +29,10 @@ from augur.application.logs import AugurLogger from augur.application.config import AugurConfig from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.application.db.util import execute_session_query from logging import Logger from augur.tasks.util.redis_list import RedisList +from augur.application.db.models import CollectionStatus, Repo CELERY_GROUP_TYPE = type(group()) CELERY_CHAIN_TYPE = type(chain()) @@ -49,10 +49,20 @@ def collection_task_wrapper(self,*args,**kwargs): @celery.task def task_success(repo_git): - logger = logging.getLogger(successResult.__name__) - - # set status to Finished in db - # set collection date in db + logger = logging.getLogger(task_success.__name__) + + with DatabaseSession(logger, engine) as session: + + repo = Repo.get_by_repo_git(session, repo_git) + if not repo: + raise Exception(f"Task with repo_git of {repo_git} but could not be found in Repo table") + + collection_status = repo.collection_status + + collection_status.status = "Success" + collection_status.data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + + session.commit() @celery.task def task_failed(request,exc,traceback): From 22c0414abfe7147b762fb18dd84be35981a4ff4b Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Thu, 26 Jan 2023 09:32:20 -0600 Subject: [PATCH 041/134] Implement more of montior task Signed-off-by: Andrew Brain --- augur/tasks/start_tasks.py | 45 ++++++++++++++++++++++++-------------- 1 file changed, 29 insertions(+), 16 deletions(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 77120918a7..28a01ea115 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -31,12 +31,21 @@ from augur.application.db.session import DatabaseSession from augur.application.db.util import execute_session_query from logging import Logger +from enum import Enum from augur.tasks.util.redis_list import RedisList from augur.application.db.models import CollectionStatus, Repo CELERY_GROUP_TYPE = type(group()) CELERY_CHAIN_TYPE = type(chain()) + +# class syntax +class CollectionState(Enum): + SUCCESS = "Success" + PENDING = "Pending" + ERROR = "Error" + COLLECTING = "Collecting" + """ @celery.task(bind=True) def collection_task_wrapper(self,*args,**kwargs): @@ -59,7 +68,7 @@ def task_success(repo_git): collection_status = repo.collection_status - collection_status.status = "Success" + collection_status.status = CollectionState.SUCCESS collection_status.data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') session.commit() @@ -215,24 +224,28 @@ def augur_collection_monitor(): #Get phase options from the config with DatabaseSession(logger, engine) as session: - config = AugurConfig(logger, session) - phase_options = config.get_section("Task_Routine") - #Get list of enabled phases - enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] - enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] - - # calculate current active repos - # calcuate the number of repos we would like to add to the queue + max_repo_count = 500 - # get repos with these requirements - # haven't been collected or not collected in awhile - # don't have a status of Error or Collecting + config = AugurConfig(logger, 
session) + phase_options = config.get_section("Task_Routine") - # loop through repos - # create chain - # start task - # set status in db to Collecting + #Get list of enabled phases + enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] + enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] + + active_repos = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING).all()) + + # get repos with these requirements + # haven't been collected or not collected in awhile + # don't have a status of Error or Collecting + # TODO: add filter to check for repos that haven't been collected in ahile + session.query(CollectionStatus).filter(CollectionStatus.status != CollectionState.ERROR, CollectionStatus.status != CollectionState.COLLECTING, CollectionStatus.data_last_collected == None) + + # loop through repos + # create chain + # start task + # set status in db to Collecting From 003fe31c0f427ecdfeb1231f36e3f466f0d20203 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 10:38:19 -0600 Subject: [PATCH 042/134] Handle repo_move Signed-off-by: Isaac Milarsky --- augur/tasks/github/detect_move/core.py | 12 +++++++++++ augur/tasks/start_tasks.py | 30 +++++++++++++++++++++++--- 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/augur/tasks/github/detect_move/core.py b/augur/tasks/github/detect_move/core.py index ce278020e2..911c9eaa13 100644 --- a/augur/tasks/github/detect_move/core.py +++ b/augur/tasks/github/detect_move/core.py @@ -5,6 +5,8 @@ from augur.tasks.github.util.util import get_owner_repo from augur.tasks.github.util.util import parse_json_response import logging +from augur.application.db.util import execute_session_query +from augur.tasks.start_tasks import CollectionState def extract_owner_and_repo_from_endpoint(session,url): response_from_gh = hit_api(session.oauths, url, session.logger) @@ -66,3 +68,13 @@ def ping_github_for_repo_move(session,repo): result = session.insert_data(current_repo_dict, Repo, ['repo_id']) session.logger.info(f"Updated repo for {owner}/{name}\n") + + statusQuery = session.query(CollectionStatus).filter(CollectionStatus.repo_id == repo.repo_id) + + collectionRecord = execute_session_query(statusQuery,'one') + collectionRecord.status = CollectionState.PENDING + session.commit() + + raise Exception("ERROR: Repo has moved! Marked repo as pending and stopped collection") + + diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 28a01ea115..010407b68e 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -77,8 +77,30 @@ def task_success(repo_git): def task_failed(request,exc,traceback): logger = logging.getLogger(task_failed.__name__) - # set status to Error in db - # log traceback to error file + with DatabaseSession(logger,engine) as session: + query = session.query(CollectionStatus).filter(CollectionStatus.task_id == request.id) + + collectionRecord = execute_session_query(query,'one') + + print(f"chain: {request.chain}") + #Make sure any further execution of tasks dependent on this one stops. + try: + #Replace the tasks queued ahead of this one in a chain with None. + request.chain = None + except AttributeError: + pass #Task is not part of a chain. Normal so don't log. + except Exception as e: + logger.error(f"Could not mutate request chain! 
\n Error: {e}") + + if collectionRecord.status == CollectionState.COLLECTING: + # set status to Error in db + collectionRecord.status = CollectionStatus.ERROR + session.commit() + + # log traceback to error file + session.logger.error(f"Task {request.id} raised exception: {exc}\n{traceback}") + + #Predefine phases. For new phases edit this and the config to reflect. @@ -94,6 +116,8 @@ def prelim_phase(repo_git): #TODO: if repo has moved mark it as pending. job = detect_github_repo_move.si(repo_obj.repo_git) + + return job @@ -240,7 +264,7 @@ def augur_collection_monitor(): # haven't been collected or not collected in awhile # don't have a status of Error or Collecting # TODO: add filter to check for repos that haven't been collected in ahile - session.query(CollectionStatus).filter(CollectionStatus.status != CollectionState.ERROR, CollectionStatus.status != CollectionState.COLLECTING, CollectionStatus.data_last_collected == None) + session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.PENDING, CollectionStatus.data_last_collected == None) # loop through repos # create chain From fdf2d9d2c89d7731ed393f04d043a40ae28d2d50 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Thu, 26 Jan 2023 10:58:31 -0600 Subject: [PATCH 043/134] Implement augur task monitor Signed-off-by: Andrew Brain --- augur/application/config.py | 2 +- augur/tasks/init/celery_app.py | 6 ++--- augur/tasks/start_tasks.py | 40 ++++++++++++++++++++++++---------- 3 files changed, 32 insertions(+), 16 deletions(-) diff --git a/augur/application/config.py b/augur/application/config.py index 7cc9c01ebc..60c1c1acc0 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -77,7 +77,7 @@ def get_development_flag(): "connection_string": "amqp://augur:password123@localhost:5672/augur_vhost" }, "Tasks": { - "collection_interval": 2592000 + "collection_interval": 600 }, "Message_Insights": { "insight_days": 30, diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 166b4303e5..764efe255e 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -115,15 +115,15 @@ def setup_periodic_tasks(sender, **kwargs): Returns The tasks so that they are grouped by the module they are defined in """ - from augur.tasks.start_tasks import start_task + from augur.tasks.start_tasks import augur_collection_monitor with DatabaseSession(logger) as session: config = AugurConfig(logger, session) collection_interval = config.get_value('Tasks', 'collection_interval') - logger.info(f"Scheduling collection every {collection_interval/60/60} hours") - sender.add_periodic_task(collection_interval, start_task.s()) + logger.info(f"Scheduling collection every {collection_interval/60} minutes") + sender.add_periodic_task(collection_interval, augur_collection_monitor.s()) @after_setup_logger.connect diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 28a01ea115..6aee0b8c1a 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -10,6 +10,7 @@ #from celery.result import AsyncResult from celery import signature from celery import group, chain, chord, signature + from sqlalchemy import or_, and_ from augur.tasks.github import * @@ -222,10 +223,13 @@ def start_task(): def augur_collection_monitor(): logger = logging.getLogger(augur_collection_monitor.__name__) + logger.info("Checking for repos to collect") + #Get phase options from the config with DatabaseSession(logger, engine) as session: max_repo_count = 500 + days = 30 config = 
AugurConfig(logger, session) phase_options = config.get_section("Task_Routine") @@ -234,18 +238,30 @@ def augur_collection_monitor(): enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] - active_repos = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING).all()) - - # get repos with these requirements - # haven't been collected or not collected in awhile - # don't have a status of Error or Collecting - # TODO: add filter to check for repos that haven't been collected in ahile - session.query(CollectionStatus).filter(CollectionStatus.status != CollectionState.ERROR, CollectionStatus.status != CollectionState.COLLECTING, CollectionStatus.data_last_collected == None) - - # loop through repos - # create chain - # start task - # set status in db to Collecting + active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING).all()) + + cutoff_date = datetime.datetime.now() - datetime.timedelta(days=days) + not_erroed = CollectionStatus.status != str(CollectionState.ERROR) + not_collecting = CollectionStatus.status != str(CollectionState.COLLECTING) + never_collected = CollectionStatus.data_last_collected == None + old_collection = CollectionStatus.data_last_collected <= cutoff_date + + limit = max_repo_count-active_repo_count + + repo_status_list = session.query(CollectionStatus).filter(and_(not_erroed, not_collecting, or_(never_collected, old_collection))).limit(limit).all() + + for repo_status in repo_status_list: + + repo_git = repo_status.repo.repo_git + + # create and start repo chain + task_id = None + + repo_status.task_id = task_id + repo_status.status = CollectionState.COLLECTING + session.commit() + + From 12e2e08e8ef8203f5c04c58702396a5cfaf0819f Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 11:09:51 -0600 Subject: [PATCH 044/134] finish up augur_collection_monitor Signed-off-by: Isaac Milarsky --- augur/tasks/start_tasks.py | 48 +++++++++++++++++++++++++------------- 1 file changed, 32 insertions(+), 16 deletions(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 010407b68e..0113e8be44 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -173,14 +173,18 @@ class AugurTaskRoutine: Attributes: logger (Logger): Get logger from AugurLogger jobs_dict (dict): Dict of data collection phases to run + repos (List[int]): List of repo_ids to run collection on. collection_phases (List[str]): List of phases to run in augur collection. + session: Database session to use """ - def __init__(self,collection_phases: List[str]=[]): + def __init__(self,session,repos: List[int]=[],collection_phases: List[str]=[]): self.logger = AugurLogger("data_collection_jobs").get_logger() #self.session = TaskSession(self.logger) self.jobs_dict = {} self.collection_phases = collection_phases #self.disabled_collection_tasks = disabled_collection_tasks + self.repos = repos + self.session = session #Assemble default phases #These will then be able to be overridden through the config. @@ -207,17 +211,27 @@ def start_data_collection(self): self.logger.info(f"Enabled phases: {list(self.jobs_dict.keys())}") augur_collection_list = [] - augur_collection_sequence = [] - for phaseName, job in self.jobs_dict.items(): - self.logger.info(f"Queuing phase {phaseName}") - - #Add the phase to the sequence in order as a celery task. 
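
# Illustrative sketch, separate from the diffs around it: the
# augur_collection_monitor patch above picks repos that are neither errored nor
# already collecting and were either never collected or last collected before a
# cutoff date. The same filter shape against a toy model on in-memory SQLite:
import datetime
from sqlalchemy import Column, DateTime, Integer, String, and_, create_engine, or_
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Status(Base):
    __tablename__ = "status"
    repo_id = Column(Integer, primary_key=True)
    status = Column(String, default="Pending")
    data_last_collected = Column(DateTime)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([
    Status(repo_id=1),                                                     # never collected
    Status(repo_id=2, status="Collecting"),                                # skipped: running
    Status(repo_id=3, data_last_collected=datetime.datetime(2020, 1, 1)),  # stale
])
session.commit()

cutoff = datetime.datetime.now() - datetime.timedelta(days=30)
due = session.query(Status).filter(
    and_(
        Status.status != "Error",
        Status.status != "Collecting",
        or_(Status.data_last_collected == None,  # noqa: E711 -- SQLAlchemy needs ==
            Status.data_last_collected <= cutoff),
    )
).limit(10).all()

print([s.repo_id for s in due])  # -> [1, 3] (order may vary)
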
- #The preliminary task creates the larger task chain - augur_collection_sequence.append(job.si()) - #Link all phases in a chain and send to celery - augur_collection_chain = chain(*augur_collection_sequence) - augur_collection_chain.apply_async() + + for repo_id in self.repos: + repo_git = self.session.query(Repo).filter( Repo.repo_id == repo_id).one().repo_git + augur_collection_sequence = [] + + for phaseName, job in self.jobs_dict.items(): + self.logger.info(f"Queuing phase {phaseName} for repo {repo_git}") + + #Add the phase to the sequence in order as a celery task. + #The preliminary task creates the larger task chain + augur_collection_sequence.append(job.si(repo_git)) + + #Link all phases in a chain and send to celery + augur_collection_chain = chain(*augur_collection_sequence) + augur_collection_chain.apply_async() + + #set status in database to collecting + repoStatus = self.session.query(CollectionStatus).filter(CollectionStatus.repo_id == repo_id).one() + repoStatus.status = CollectionState.COLLECTING + session.commit() """ @celery.task @@ -264,12 +278,14 @@ def augur_collection_monitor(): # haven't been collected or not collected in awhile # don't have a status of Error or Collecting # TODO: add filter to check for repos that haven't been collected in ahile - session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.PENDING, CollectionStatus.data_last_collected == None) + query = session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.PENDING, CollectionStatus.data_last_collected == None) + + + repoStatusObjs = query.limit(max_repo_count - active_repos).all() + + repo_ids = [repo.repo_id for repo in repoStatusObjs] - # loop through repos - # create chain - # start task - # set status in db to Collecting + augur_collection = AugurTaskRoutine(session,repos=repo_ids,collection_phases=enabled_phases) From 500875832d47fcb6bcf2cfc077c1431d7b1b6a15 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 11:17:55 -0600 Subject: [PATCH 045/134] Task success and fail Signed-off-by: Isaac Milarsky --- augur/tasks/start_tasks.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 1aa55a2ef8..0ec11d1c0c 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -224,10 +224,11 @@ def start_data_collection(self): #Add the phase to the sequence in order as a celery task. 
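
# Illustrative sketch, separate from the hunks around it: the patches here
# converge on ending every per-repo chain with a task that records success and
# attaching an error callback through link_error, so the collection status can
# be updated however the chain ends. Task names and the broker URL are
# placeholders; Celery calls an errback of this shape with (request, exc,
# traceback), just like task_failed above.
from celery import Celery, chain

app = Celery("sketch", broker="redis://localhost:6379/0")

@app.task
def collect(repo_git):
    return f"collected {repo_git}"

@app.task
def mark_success(repo_git):
    print(f"status -> Success for {repo_git}")

@app.task
def mark_failed(request, exc, traceback):
    print(f"status -> Error for task {request.id}: {exc}")

repo_git = "https://github.com/example/repo"
workflow = chain(collect.si(repo_git), mark_success.si(repo_git))

# Submitting needs a running broker and worker:
# workflow.apply_async(link_error=mark_failed.s())
print(workflow)
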
#The preliminary task creates the larger task chain augur_collection_sequence.append(job.si(repo_git)) - + + augur_collection_sequence.append(task_success(repo_git)) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) - augur_collection_chain.apply_async() + augur_collection_chain.apply_async(link_error=task_failed.s()) #set status in database to collecting repoStatus = self.session.query(CollectionStatus).filter(CollectionStatus.repo_id == repo_id).one() From 00932bf5ea27542e4b0bd2d0fadafac061ab046f Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 26 Jan 2023 11:28:11 -0600 Subject: [PATCH 046/134] Make use of orm relationships to simplify code Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/tasks/start_tasks.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 0ec11d1c0c..2bb11ce665 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -215,9 +215,11 @@ def start_data_collection(self): for repo_id in self.repos: - repo_git = self.session.query(Repo).filter( Repo.repo_id == repo_id).one().repo_git - augur_collection_sequence = [] + repo = self.session.query(Repo).filter(Repo.repo_id == repo_id).one() + repo_git = repo.repo_git + + augur_collection_sequence = [] for phaseName, job in self.jobs_dict.items(): self.logger.info(f"Queuing phase {phaseName} for repo {repo_git}") @@ -231,9 +233,9 @@ def start_data_collection(self): augur_collection_chain.apply_async(link_error=task_failed.s()) #set status in database to collecting - repoStatus = self.session.query(CollectionStatus).filter(CollectionStatus.repo_id == repo_id).one() + repoStatus = repo.collection_status repoStatus.status = CollectionState.COLLECTING - session.commit() + self.session.commit() """ @celery.task From 43d5708a722b5c29d5e9a329b503f7f02a9aaa30 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 11:42:10 -0600 Subject: [PATCH 047/134] python imports Signed-off-by: Isaac Milarsky --- augur/tasks/github/detect_move/core.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/augur/tasks/github/detect_move/core.py b/augur/tasks/github/detect_move/core.py index 911c9eaa13..875ca8a7e2 100644 --- a/augur/tasks/github/detect_move/core.py +++ b/augur/tasks/github/detect_move/core.py @@ -6,7 +6,13 @@ from augur.tasks.github.util.util import parse_json_response import logging from augur.application.db.util import execute_session_query -from augur.tasks.start_tasks import CollectionState + +class CollectionState(Enum): + SUCCESS = "Success" + PENDING = "Pending" + ERROR = "Error" + COLLECTING = "Collecting" + def extract_owner_and_repo_from_endpoint(session,url): response_from_gh = hit_api(session.oauths, url, session.logger) From b0adb39ccb879fdb0120c2bbba5858d23cb6a8bb Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 11:43:47 -0600 Subject: [PATCH 048/134] python imports Signed-off-by: Isaac Milarsky --- augur/tasks/github/detect_move/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/augur/tasks/github/detect_move/core.py b/augur/tasks/github/detect_move/core.py index 875ca8a7e2..bc5ecb64f8 100644 --- a/augur/tasks/github/detect_move/core.py +++ b/augur/tasks/github/detect_move/core.py @@ -5,6 +5,7 @@ from augur.tasks.github.util.util import get_owner_repo from augur.tasks.github.util.util import parse_json_response 
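
# Illustrative sketch, separate from the hunks around it: CollectionStatus.status
# is a plain String column while CollectionState is a Python Enum, and a bare
# Enum member never compares equal to its string value -- the reason a later
# patch in this series ("python enum syntax is kinda weird") switches to
# CollectionState.X.value. Standard library only:
from enum import Enum

class CollectionState(Enum):
    SUCCESS = "Success"
    PENDING = "Pending"
    ERROR = "Error"
    COLLECTING = "Collecting"

print(CollectionState.PENDING == "Pending")        # False: member vs. str
print(CollectionState.PENDING.value == "Pending")  # True: compare the value
print(str(CollectionState.PENDING))                # "CollectionState.PENDING"
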
import logging +from enum import Enum from augur.application.db.util import execute_session_query class CollectionState(Enum): From 63f1d065f5e8b10d75b489b89fdb79abdee3cac2 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 26 Jan 2023 11:46:06 -0600 Subject: [PATCH 049/134] Fix backend.py to work with new system Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/cli/backend.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index a80701e0c1..5e9312995d 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -15,12 +15,13 @@ from celery import chain, signature, group import uuid import traceback +from sqlalchemy import update from augur import instance_id -from augur.tasks.start_tasks import start_task +from augur.tasks.start_tasks import augur_collection_monitor, CollectionState from augur.tasks.init.redis_connection import redis_connection -from augur.application.db.models import Repo +from augur.application.db.models import Repo, CollectionStatus from augur.application.db.session import DatabaseSession from augur.application.logs import AugurLogger from augur.application.config import AugurConfig @@ -93,7 +94,15 @@ def start(disable_collection, development, port): cpu_worker_process = subprocess.Popen(cpu_worker.split(" ")) time.sleep(5) - start_task.si().apply_async() + with DatabaseSession(logger) as session: + + session.execute( + update(CollectionStatus) + .where(CollectionStatus.status == CollectionState.COLLECTING) + .values(status="Pending") + ) + + augur_collection_monitor.si().apply_async() celery_command = "celery -A augur.tasks.init.celery_app.celery_app beat -l debug" celery_beat_process = subprocess.Popen(celery_command.split(" ")) From aba325fb4b71d07354f73e6f41e22ccd54a43f79 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 13:39:18 -0600 Subject: [PATCH 050/134] python enum syntax is kinda weird Signed-off-by: Isaac Milarsky --- augur/application/cli/backend.py | 2 +- augur/tasks/github/detect_move/core.py | 2 +- augur/tasks/start_tasks.py | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 5e9312995d..10b1a6cfbb 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -98,7 +98,7 @@ def start(disable_collection, development, port): session.execute( update(CollectionStatus) - .where(CollectionStatus.status == CollectionState.COLLECTING) + .where(CollectionStatus.status == CollectionState.COLLECTING.value) .values(status="Pending") ) diff --git a/augur/tasks/github/detect_move/core.py b/augur/tasks/github/detect_move/core.py index bc5ecb64f8..c4ee89dad1 100644 --- a/augur/tasks/github/detect_move/core.py +++ b/augur/tasks/github/detect_move/core.py @@ -79,7 +79,7 @@ def ping_github_for_repo_move(session,repo): statusQuery = session.query(CollectionStatus).filter(CollectionStatus.repo_id == repo.repo_id) collectionRecord = execute_session_query(statusQuery,'one') - collectionRecord.status = CollectionState.PENDING + collectionRecord.status = CollectionState.PENDING.value session.commit() raise Exception("ERROR: Repo has moved! 
Marked repo as pending and stopped collection") diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 2bb11ce665..71e1ac6f4b 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -69,7 +69,7 @@ def task_success(repo_git): collection_status = repo.collection_status - collection_status.status = CollectionState.SUCCESS + collection_status.status = CollectionState.SUCCESS.value collection_status.data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') session.commit() @@ -93,7 +93,7 @@ def task_failed(request,exc,traceback): except Exception as e: logger.error(f"Could not mutate request chain! \n Error: {e}") - if collectionRecord.status == CollectionState.COLLECTING: + if collectionRecord.status == CollectionState.COLLECTING.value: # set status to Error in db collectionRecord.status = CollectionStatus.ERROR session.commit() @@ -234,7 +234,7 @@ def start_data_collection(self): #set status in database to collecting repoStatus = repo.collection_status - repoStatus.status = CollectionState.COLLECTING + repoStatus.status = CollectionState.COLLECTING.value self.session.commit() """ @@ -279,11 +279,11 @@ def augur_collection_monitor(): enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] - active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING).all()) + active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value).all()) cutoff_date = datetime.datetime.now() - datetime.timedelta(days=days) - not_erroed = CollectionStatus.status != str(CollectionState.ERROR) - not_collecting = CollectionStatus.status != str(CollectionState.COLLECTING) + not_erroed = CollectionStatus.status != str(CollectionState.ERROR.value) + not_collecting = CollectionStatus.status != str(CollectionState.COLLECTING.value) never_collected = CollectionStatus.data_last_collected == None old_collection = CollectionStatus.data_last_collected <= cutoff_date From 294a4e7bc5c0b44d1287f575809c0ff63d1c090d Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 15:34:36 -0600 Subject: [PATCH 051/134] cleanup syntax Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 5 +++-- augur/tasks/github/events/tasks.py | 1 + augur/tasks/start_tasks.py | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 5c2b403fe3..5597f9efb8 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -448,8 +448,9 @@ def generate_contributor_sequence(logger,repo_git): query = s.sql.text("""SELECT repo_id FROM repo WHERE repo_git=:value""").bindparams(value=repo_git) - repo = execute_session_query(query,'one')#all_repos = session.fetchall_data_from_sql_text(query) - repo_id = repo.repo_id + repo = session.execute_sql(query).fetchone() + session.logger.info(f"repo: {repo}") + repo_id = repo[0] #pdb.set_trace() #breakpoint() #for repo in all_repos: diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 3b77bb7be5..291dc93d90 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -14,6 +14,7 @@ platform_id = 1 +@celery.task() def collect_events(repo_git: str): logger = logging.getLogger(collect_events.__name__) diff --git 
a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 71e1ac6f4b..28345858ca 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -225,9 +225,9 @@ def start_data_collection(self): #Add the phase to the sequence in order as a celery task. #The preliminary task creates the larger task chain - augur_collection_sequence.append(job.si(repo_git)) + augur_collection_sequence.append(job(repo_git)) - augur_collection_sequence.append(task_success(repo_git)) + augur_collection_sequence.append(task_success.si(repo_git)) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) augur_collection_chain.apply_async(link_error=task_failed.s()) From 38a92f5939afb0b537c8e802fb8882b7325a02a2 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Thu, 26 Jan 2023 16:44:11 -0600 Subject: [PATCH 052/134] Fixes to user creation Signed-off-by: Andrew Brain --- augur/api/view/routes.py | 4 ++-- augur/application/db/models/augur_operations.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 78a461a1d6..fd6b32b755 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -171,11 +171,11 @@ def user_login(): last_name = request.form.get('last_name') admin = request.form.get('admin') or False - result = User.create_user(db_session, username, password, email, first_name, last_name, admin) + result = User.create_user(username, password, email, first_name, last_name, admin) if not result[0]: raise LoginException("An error occurred registering your account") else: - user = User.get_user(username) + user = User.get_user(db_session, username) flash(result[1]["status"]) # Log the user in if the password is valid diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 3f3b8566f7..9cdbd621f7 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -322,7 +322,7 @@ def create_user(username: str, password: str, email: str, first_name:str, last_n session.add(user) session.commit() - result = user.add_group(session, "default") + result = user.add_group("default") if not result[0] and result[1]["status"] != "Group already exists": return False, {"status": "Failed to add default group for the user"} From cab1245b1c6f4cc717570101b0e297eede315360 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 26 Jan 2023 17:38:22 -0600 Subject: [PATCH 053/134] Rate limit handle Signed-off-by: Isaac Milarsky --- augur/tasks/github/util/github_paginator.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/augur/tasks/github/util/github_paginator.py b/augur/tasks/github/util/github_paginator.py index 6734c829bf..7f0e31809c 100644 --- a/augur/tasks/github/util/github_paginator.py +++ b/augur/tasks/github/util/github_paginator.py @@ -123,6 +123,16 @@ def process_dict_response(logger: logging.Logger, response: httpx.Response, page if errors: for error in errors: if "API rate limit exceeded for user" in error['message']: + current_epoch = int(time.time()) + epoch_when_key_resets = int(response.headers["X-RateLimit-Reset"]) + key_reset_time = epoch_when_key_resets - current_epoch + + if key_reset_time < 0: + logger.error(f"Key reset time was less than 0 setting it to 0.\nThe current epoch is {current_epoch} and the epoch that the key resets at is {epoch_when_key_resets}") + key_reset_time = 0 + + logger.info(f"\n\n\nAPI rate limit exceeded. 
Sleeping until the key resets ({key_reset_time} seconds)") + time.sleep(key_reset_time) return GithubApiResult.RATE_LIMIT_EXCEEDED return GithubApiResult.NEW_RESULT From 328925abf6d169e8699af79ae7c6cdd3f09d9c59 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Fri, 27 Jan 2023 11:28:06 -0600 Subject: [PATCH 054/134] Fix upgrade script Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/db/models/augur_operations.py | 11 +++++++++++ .../alembic/versions/5_add_collection_status_table.py | 11 +++++++++++ 2 files changed, 22 insertions(+) diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index a7247bf951..44339f85f7 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -730,6 +730,8 @@ def add(session, url: List[str], user_id: int, group_name=None, group_id=None, v result = UserRepo.insert(session, repo_id, group_id) + CollectionStatus.create(session, repo_id) + if not result: return False, {"status": "repo_user insertion failed", "repo_url": url} @@ -928,3 +930,12 @@ class CollectionStatus(Base): task_id = Column(String) repo = relationship("Repo") + + @staticmethod + def create(session, repo_id): + + status = CollectionStatus(repo_id=repo_id) + session.add(status) + session.commit() + + return status diff --git a/augur/application/schema/alembic/versions/5_add_collection_status_table.py b/augur/application/schema/alembic/versions/5_add_collection_status_table.py index 4db2e321a6..a2ce4d2cb3 100644 --- a/augur/application/schema/alembic/versions/5_add_collection_status_table.py +++ b/augur/application/schema/alembic/versions/5_add_collection_status_table.py @@ -8,6 +8,8 @@ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql +from sqlalchemy.sql import text + # revision identifiers, used by Alembic. 
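
# Illustrative sketch, separate from the migration file around it: the
# github_paginator change a little above computes a sleep from GitHub's
# X-RateLimit-Reset header (an epoch timestamp) and clamps negatives to zero.
# Same arithmetic, standalone, with an invented headers dict:
import time

headers = {"X-RateLimit-Reset": str(int(time.time()) + 3)}  # pretend the key resets in 3s

current_epoch = int(time.time())
epoch_when_key_resets = int(headers["X-RateLimit-Reset"])
key_reset_time = max(epoch_when_key_resets - current_epoch, 0)  # never sleep a negative time

print(f"sleeping {key_reset_time} seconds until the key resets")
time.sleep(key_reset_time)
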
revision = '5' @@ -28,6 +30,15 @@ def upgrade(): sa.PrimaryKeyConstraint('repo_id'), schema='augur_operations' ) + + # add collection status for any existing repos + conn = op.get_bind() + repos = conn.execute(text("""SELECT repo_id from repo""")).fetchall() + + for repo in repos: + repo_id = repo[0] + conn.execute(text(f"""INSERT INTO "augur_operations"."collection_status" ("repo_id") VALUES ({repo_id});""")) + # ### end Alembic commands ### From 8910dc2fd3a6a5705413e9db46f34002cb8328b1 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 27 Jan 2023 11:39:58 -0600 Subject: [PATCH 055/134] Create status when adding repo Signed-off-by: Andrew Brain --- .../application/db/models/augur_operations.py | 18 ++++++++++-------- augur/util/repo_load_controller.py | 4 +++- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 44339f85f7..2987ad4aa6 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -729,12 +729,13 @@ def add(session, url: List[str], user_id: int, group_name=None, group_id=None, v return False, {"status": "Repo insertion failed", "repo_url": url} result = UserRepo.insert(session, repo_id, group_id) - - CollectionStatus.create(session, repo_id) - if not result: return False, {"status": "repo_user insertion failed", "repo_url": url} + status = CollectionStatus.insert(session, repo_id) + if not status: + return False, {"status": "Failed to create status for repo", "repo_url": url} + return True, {"status": "Repo Added", "repo_url": url} @staticmethod @@ -932,10 +933,11 @@ class CollectionStatus(Base): repo = relationship("Repo") @staticmethod - def create(session, repo_id): + def insert(session, repo_id): - status = CollectionStatus(repo_id=repo_id) - session.add(status) - session.commit() + collection_status_unique = ["repo_id"] + result = session.insert_data({"repo_id": repo_id}, CollectionStatus, collection_status_unique, on_conflict_update=False) + if not result: + return False - return status + return True diff --git a/augur/util/repo_load_controller.py b/augur/util/repo_load_controller.py index 7ce9fa7409..934e435a70 100644 --- a/augur/util/repo_load_controller.py +++ b/augur/util/repo_load_controller.py @@ -8,7 +8,7 @@ from typing import List, Any, Dict from augur.application.db.engine import DatabaseEngine -from augur.application.db.models import Repo, UserRepo, RepoGroup, UserGroup, User +from augur.application.db.models import Repo, UserRepo, RepoGroup, UserGroup, User, CollectionStatus from augur.application.db.models.augur_operations import retrieve_org_repos from augur.application.db.util import execute_session_query @@ -95,6 +95,8 @@ def add_cli_repo(self, repo_data: Dict[str, Any], valid_repo=False): UserRepo.insert(self.session, repo_id) + CollectionStatus.insert(self.session, repo_id) + def add_cli_org(self, org_name): """Add list of orgs and their repos to specified repo_groups From 0763c9eea916ad2635dc7d42636fd52798020917 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 27 Jan 2023 13:21:28 -0600 Subject: [PATCH 056/134] Various fixes to new collection system Signed-off-by: Andrew Brain --- augur/application/cli/backend.py | 10 +++------- augur/application/db/models/augur_data.py | 2 +- augur/application/db/models/augur_operations.py | 15 +++++++-------- augur/tasks/start_tasks.py | 13 ++++++++++--- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git 
a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 10b1a6cfbb..feaa9922eb 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -95,13 +95,9 @@ def start(disable_collection, development, port): time.sleep(5) with DatabaseSession(logger) as session: - - session.execute( - update(CollectionStatus) - .where(CollectionStatus.status == CollectionState.COLLECTING.value) - .values(status="Pending") - ) - + session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value).update({CollectionStatus.status: "Pending"}) + session.commit() + augur_collection_monitor.si().apply_async() celery_command = "celery -A augur.tasks.init.celery_app.celery_app beat -l debug" diff --git a/augur/application/db/models/augur_data.py b/augur/application/db/models/augur_data.py index 95bb401920..5365dbf37f 100644 --- a/augur/application/db/models/augur_data.py +++ b/augur/application/db/models/augur_data.py @@ -845,7 +845,7 @@ class Repo(Base): repo_group = relationship("RepoGroup") user_repo = relationship("UserRepo") - collection_status = relationship("CollectionStatus") + collection_status = relationship("CollectionStatus", back_populates="repo") @staticmethod def get_by_id(session, repo_id): diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index dd5afbfe84..4b6f192b81 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -322,7 +322,7 @@ def create_user(username: str, password: str, email: str, first_name:str, last_n session.add(user) session.commit() - result = user.add_group("default") + result = user.add_group(session, "default") if not result[0] and result[1]["status"] != "Group already exists": return False, {"status": "Failed to add default group for the user"} @@ -411,9 +411,7 @@ def remove_group(self, group_name): def add_repo(self, group_name, repo_url): - from augur.tasks.github.util.github_task_session import GithubTaskSession - - with GithubTaskSession(logger) as session: + with DatabaseSession(logger) as session: result = UserRepo.add(session, repo_url, self.user_id, group_name) return result @@ -427,9 +425,7 @@ def remove_repo(self, session, group_name, repo_id): def add_org(self, group_name, org_url): - from augur.tasks.github.util.github_task_session import GithubTaskSession - - with GithubTaskSession(logger) as session: + with DatabaseSession(logger) as session: result = UserRepo.add_org_repos(session, org_url, self.user_id, group_name) return result @@ -842,6 +838,9 @@ def delete_refresh_tokens(self, session): session.delete(token) session.commit() + session.delete(self) + session.commit() + class ClientApplication(Base): __tablename__ = "client_applications" __table_args__ = ( @@ -931,7 +930,7 @@ class CollectionStatus(Base): status = Column(String, nullable=False, server_default=text("'Pending'")) task_id = Column(String) - repo = relationship("Repo") + repo = relationship("Repo", back_populates="collection_status") @staticmethod def insert(session, repo_id): diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 28345858ca..29cd5dbb64 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -61,13 +61,15 @@ def collection_task_wrapper(self,*args,**kwargs): def task_success(repo_git): logger = logging.getLogger(task_success.__name__) + logger.info(f"Repo '{repo_git}' succeeded") + with DatabaseSession(logger, engine) as session: repo = 
Repo.get_by_repo_git(session, repo_git) if not repo: raise Exception(f"Task with repo_git of {repo_git} but could not be found in Repo table") - collection_status = repo.collection_status + collection_status = repo.collection_status[0] collection_status.status = CollectionState.SUCCESS.value collection_status.data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') @@ -230,10 +232,13 @@ def start_data_collection(self): augur_collection_sequence.append(task_success.si(repo_git)) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) - augur_collection_chain.apply_async(link_error=task_failed.s()) + task_id = augur_collection_chain.apply_async(link_error=task_failed.s()).task_id + + self.logger.info(f"Setting repo_id {repo_id} to collecting") #set status in database to collecting - repoStatus = repo.collection_status + repoStatus = repo.collection_status[0] + repoStatus.task_id = task_id repoStatus.status = CollectionState.COLLECTING.value self.session.commit() @@ -293,6 +298,8 @@ def augur_collection_monitor(): repo_ids = [repo.repo_id for repo in repo_status_list] + logger.info(f"Starting collection on {len(repo_ids)} repos") + augur_collection = AugurTaskRoutine(session,repos=repo_ids,collection_phases=enabled_phases) augur_collection.start_data_collection() From c791e1eae8c5636d5d5434aedb6a30c3cd59b85a Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 27 Jan 2023 14:59:14 -0600 Subject: [PATCH 057/134] Non-domain repo tasks Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 40 ++++++++++++++++++++++++++++----- augur/tasks/init/celery_app.py | 5 +++++ augur/tasks/start_tasks.py | 25 +++++++-------------- 3 files changed, 47 insertions(+), 23 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 5597f9efb8..443107bf90 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -321,10 +321,10 @@ def analyze_commits_in_parallel(repo_id, multithreaded: bool)-> None: @celery.task def nuke_affiliations_facade_task(): logger = logging.getLogger(nuke_affiliations_facade_task.__name__) - # TODO: Is this session ever closed? - session = FacadeSession(logger) - nuke_affiliations(session) + with FacadeSession(logger) as session: + + nuke_affiliations(session) @celery.task def fill_empty_affiliations_facade_task(): @@ -517,6 +517,36 @@ def generate_facade_chain(logger,repo_git): #Generate contributor analysis task group. 
facade_sequence.append(generate_contributor_sequence(logger,repo_git)) + + logger.info(f"Facade sequence: {facade_sequence}") + return chain(*facade_sequence) + +def generate_non_repo_domain_facade_tasks(logger): + logger.info("Generating facade sequence") + with FacadeSession(logger) as session: + + # Figure out what we need to do + limited_run = session.limited_run + delete_marked_repos = session.delete_marked_repos + pull_repos = session.pull_repos + clone_repos = session.clone_repos + check_updates = session.check_updates + force_updates = session.force_updates + run_analysis = session.run_analysis + force_analysis = session.force_analysis + nuke_stored_affiliations = session.nuke_stored_affiliations + fix_affiliations = session.fix_affiliations + force_invalidate_caches = session.force_invalidate_caches + rebuild_caches = session.rebuild_caches + #if abs((datetime.datetime.strptime(session.cfg.get_setting('aliases_processed')[:-3], + # '%Y-%m-%d %I:%M:%S.%f') - datetime.datetime.now()).total_seconds()) // 3600 > int(session.cfg.get_setting( + # 'update_frequency')) else 0 + force_invalidate_caches = session.force_invalidate_caches + create_xlsx_summary_files = session.create_xlsx_summary_files + multithreaded = session.multithreaded + + facade_sequence = [] + if nuke_stored_affiliations: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) @@ -530,6 +560,4 @@ def generate_facade_chain(logger,repo_git): if not limited_run or (limited_run and rebuild_caches): facade_sequence.append(rebuild_unknown_affiliation_and_web_caches_facade_task.si().on_error(facade_error_handler.s()))#rebuild_unknown_affiliation_and_web_caches(session.cfg) - logger.info(f"Facade sequence: {facade_sequence}") - return chain(*facade_sequence) - + return chain(*facade_sequence) \ No newline at end of file diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 764efe255e..57d581bfb6 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -125,6 +125,11 @@ def setup_periodic_tasks(sender, **kwargs): logger.info(f"Scheduling collection every {collection_interval/60} minutes") sender.add_periodic_task(collection_interval, augur_collection_monitor.s()) + #Do longer tasks less often + non_domain_collection_interval = collection_interval * 5 + logger.info(f"Scheduling non-repo-domain collection every {non_domain_collection_interval/60} minutes") + sender.add_periodic_task(non_domain_collection_interval, non_repo_domain_tasks().s) + @after_setup_logger.connect def setup_loggers(*args,**kwargs): diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 28345858ca..e3c28fb5d7 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -156,7 +156,6 @@ def repo_collect_phase(repo_git): repo_info_task, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), chain(generate_facade_chain(logger,repo_git),process_dependency_metrics.si(repo_git)), - collect_releases.si() ) return repo_task_group @@ -237,26 +236,18 @@ def start_data_collection(self): repoStatus.status = CollectionState.COLLECTING.value self.session.commit() -""" @celery.task -def start_task(): - - logger = logging.getLogger(start_task.__name__) - - #Get phase options from the config - with DatabaseSession(logger, engine) as session: - config = AugurConfig(logger, session) - phase_options = config.get_section("Task_Routine") +def non_repo_domain_tasks(): + logger = 
logging.getLogger(non_repo_domain_tasks.__name__) - #Get list of enabled phases - enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] - enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] + logger.info("Executing non-repo domain tasks") - #print(f"disabled: {disabled_phases}") - augur_collection = AugurTaskRoutine(collection_phases=enabled_phases) + tasks = group( + generate_non_repo_domain_facade_tasks(logger), + collect_releases.si() + ) - augur_collection.start_data_collection() -""" + tasks.apply_async() From a237ee2185866f114714399b29889b7bdc5eaa9a Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 27 Jan 2023 16:09:54 -0600 Subject: [PATCH 058/134] working on link_commits_to_contrib Signed-off-by: Isaac Milarsky --- augur/tasks/github/facade_github/tasks.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 896321c3c9..b0b86e34e2 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -210,10 +210,9 @@ def link_commits_to_contributor(session,contributorQueue): query = s.sql.text(""" UPDATE commits SET cmt_ght_author_id=:cntrb_id - WHERE cmt_committer_email=:cntrb_email - OR cmt_author_raw_email=:cntrb_email + WHERE + cmt_author_raw_email=:cntrb_email OR cmt_author_email=:cntrb_email - OR cmt_committer_raw_email=:cntrb_email """).bindparams(cntrb_id=cntrb["cntrb_id"],cntrb_email=cntrb["email"]) #engine.execute(query, **data) From 7f43a16297fa92421f80ad655df0da3759a9138a Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sat, 28 Jan 2023 08:42:33 -0600 Subject: [PATCH 059/134] Fix small error Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/api/view/augur_view.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index 3afa315a11..7124067ce5 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -63,6 +63,9 @@ def load_user(user_id): db_session = DatabaseSession(logger) user = User.get_user(db_session, user_id) + if not user: + return None + groups = user.groups tokens = user.tokens applications = user.applications @@ -74,8 +77,6 @@ def load_user(user_id): application = token.application db_session.expunge(user) - if not user: - return None # The flask_login library sets a unique session["_id"] # when login_user() is called successfully From c933f8bb835ee2bc3748c0a151c39477ba631c64 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Sat, 28 Jan 2023 09:13:29 -0600 Subject: [PATCH 060/134] Reset facade if repo was stopped in middle of collection and set task_id to None after finished Signed-off-by: Andrew Brain --- augur/application/cli/backend.py | 10 +++++++++- augur/tasks/start_tasks.py | 1 + 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index feaa9922eb..64a19dc0da 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -95,7 +95,15 @@ def start(disable_collection, development, port): time.sleep(5) with DatabaseSession(logger) as session: - session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value).update({CollectionStatus.status: "Pending"}) + collection_status_list = 
session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value) + + for status in collection_status_list: + repo = status.repo + repo.repo_name = None + repo.repo_path = None + repo.repo_status = "New" + + collection_status_list.update({CollectionStatus.status: "Pending"}) session.commit() augur_collection_monitor.si().apply_async() diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 29cd5dbb64..4bfdeb36bf 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -73,6 +73,7 @@ def task_success(repo_git): collection_status.status = CollectionState.SUCCESS.value collection_status.data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + collection_status.task_id = None session.commit() From f7d4192c91c552829a9d8a12ad4f655539735971 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sat, 28 Jan 2023 10:07:19 -0600 Subject: [PATCH 061/134] Raise hardcoded max repo count Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/tasks/start_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 4bfdeb36bf..bbf630c86e 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -275,7 +275,7 @@ def augur_collection_monitor(): #Get phase options from the config with DatabaseSession(logger, engine) as session: - max_repo_count = 500 + max_repo_count = 1000 days = 30 config = AugurConfig(logger, session) From a2fd79c8bb9f447d9a471f7f85e4d63572874c62 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sat, 28 Jan 2023 10:16:20 -0600 Subject: [PATCH 062/134] Make releases task run on one repo Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/tasks/github/releases/tasks.py | 17 +++++++++-------- augur/tasks/start_tasks.py | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/augur/tasks/github/releases/tasks.py b/augur/tasks/github/releases/tasks.py index a71f3da480..427a3bbd9f 100644 --- a/augur/tasks/github/releases/tasks.py +++ b/augur/tasks/github/releases/tasks.py @@ -5,15 +5,16 @@ import traceback @celery.task -def collect_releases(): +def collect_releases(repo_git): logger = logging.getLogger(collect_releases.__name__) with GithubTaskSession(logger, engine) as session: - query = session.query(Repo) - repos = execute_session_query(query, 'all') - for repo in repos: - try: - releases_model(session, repo.repo_git, repo.repo_id) - except Exception as e: - logger.error(f"Could not collect releases for {repo.repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") + query = session.query(Repo).filter(Repo.repo_git == repo_git) + repo_obj = execute_session_query(query, 'one') + repo_id = repo_obj.repo_id + + try: + releases_model(session, repo_git, repo_id) + except Exception as e: + logger.error(f"Could not collect releases for {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}") diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index bbf630c86e..94390bda54 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -159,7 +159,7 @@ def repo_collect_phase(repo_git): repo_info_task, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), 
chain(generate_facade_chain(logger,repo_git),process_dependency_metrics.si(repo_git)), - collect_releases.si() + collect_releases.si(repo_git) ) return repo_task_group From 937dbd883a85de02d35fd4370bae2d4d2f3cd3c6 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sat, 28 Jan 2023 10:30:34 -0600 Subject: [PATCH 063/134] Redus max repo count Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/tasks/start_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 94390bda54..71c320b674 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -275,7 +275,7 @@ def augur_collection_monitor(): #Get phase options from the config with DatabaseSession(logger, engine) as session: - max_repo_count = 1000 + max_repo_count = 500 days = 30 config = AugurConfig(logger, session) From 8c711d8053bae495200e60866c740696ad68cf17 Mon Sep 17 00:00:00 2001 From: Sean Goggins Date: Sun, 29 Jan 2023 12:18:33 -0600 Subject: [PATCH 064/134] Table name variable not assigned. --- .../git/util/facade_worker/facade_worker/facade01config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py index f0f16b3d38..739ce885b5 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py @@ -221,7 +221,7 @@ def insert_or_update_data(self, query, **bind_args)-> None: if isinstance(e.orig, DeadlockDetected): deadlock_detected = True sleep_time = random.choice(sleep_time_list) - self.logger.debug(f"Deadlock detected on {table.__table__} table...trying again in {round(sleep_time)} seconds: transaction size: {len(data)}") + self.logger.debug(f"Deadlock detected on table...trying again in {round(sleep_time)} seconds: transaction size: {len(data)}") time.sleep(sleep_time) attempts += 1 From 1409acecba3eca1baf5c214d192955e3a01cd4ba Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sun, 29 Jan 2023 14:19:39 -0600 Subject: [PATCH 065/134] Narrowing scope of GitHub committer update queries. --- augur/tasks/github/facade_github/tasks.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 896321c3c9..4811b77f0e 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -210,10 +210,11 @@ def link_commits_to_contributor(session,contributorQueue): query = s.sql.text(""" UPDATE commits SET cmt_ght_author_id=:cntrb_id - WHERE cmt_committer_email=:cntrb_email - OR cmt_author_raw_email=:cntrb_email + WHERE --cmt_committer_email=:cntrb_email + --OR + cmt_author_raw_email=:cntrb_email OR cmt_author_email=:cntrb_email - OR cmt_committer_raw_email=:cntrb_email + --OR cmt_committer_raw_email=:cntrb_email """).bindparams(cntrb_id=cntrb["cntrb_id"],cntrb_email=cntrb["email"]) #engine.execute(query, **data) From 707154f184aaa46fdf2e53b0a9502a42f9035d5b Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sun, 29 Jan 2023 14:33:28 -0600 Subject: [PATCH 066/134] Merely commenting out SQL does not work. 
--- augur/tasks/github/facade_github/tasks.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 4811b77f0e..41c0249786 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -210,11 +210,9 @@ def link_commits_to_contributor(session,contributorQueue): query = s.sql.text(""" UPDATE commits SET cmt_ght_author_id=:cntrb_id - WHERE --cmt_committer_email=:cntrb_email - --OR + WHERE cmt_author_raw_email=:cntrb_email OR cmt_author_email=:cntrb_email - --OR cmt_committer_raw_email=:cntrb_email """).bindparams(cntrb_id=cntrb["cntrb_id"],cntrb_email=cntrb["email"]) #engine.execute(query, **data) From 227edf1a7101874f5bec42510dfd888685b3370b Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Sun, 29 Jan 2023 14:56:22 -0600 Subject: [PATCH 067/134] missing traceback import. --- augur/tasks/github/messages/tasks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index 9290da9236..5180c1982b 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -1,6 +1,7 @@ import time import logging +import traceback from augur.tasks.init.celery_app import celery_app as celery, engine from augur.application.db.data_parse import * From d47e996dfec8abb3cb4191ea9e98adb492ba8fff Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sun, 29 Jan 2023 16:18:16 -0600 Subject: [PATCH 068/134] Add logs Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/db/session.py | 1 + augur/tasks/init/celery_app.py | 1 + 2 files changed, 2 insertions(+) diff --git a/augur/application/db/session.py b/augur/application/db/session.py index edaa16a70a..fff55f242f 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -63,6 +63,7 @@ def __init__(self, logger, engine=None): self.engine_created = True self.engine = DatabaseEngine().engine + logger.info("ENGINE CREATE: Created engine in session") super().__init__(self.engine) diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 764efe255e..1a07f1cae4 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -146,6 +146,7 @@ def init_worker(**kwargs): from augur.application.db.engine import DatabaseEngine engine = DatabaseEngine().engine + logger.info(f"Creating database engine for worker. 
Engine: {engine}") @worker_process_shutdown.connect From 3f7beb42ce0bf639f53947b4709118724e749fd4 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Sun, 29 Jan 2023 16:36:51 -0600 Subject: [PATCH 069/134] Add logs Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/db/session.py | 2 ++ augur/tasks/github/pull_requests/tasks.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/augur/application/db/session.py b/augur/application/db/session.py index fff55f242f..371d0cb462 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -53,6 +53,8 @@ class DatabaseSession(Session): def __init__(self, logger, engine=None): + logger.info(f"Engine passed to session: {engine}") + self.logger = logger self.engine = engine self.engine_created = False diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 9a8ecfb33b..e449f4b971 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -22,6 +22,8 @@ def collect_pull_requests(repo_git: str) -> None: logger = logging.getLogger(collect_pull_requests.__name__) + logger.info(f"Celery engine: {engine}") + with DatabaseSession(logger, engine) as session: From 965a22225e287b12e0bdf72bd1673c0e8cba3c02 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 11:09:41 -0600 Subject: [PATCH 070/134] Add handlers for the eventlet worker engine creation and deletion Signed-off-by: Andrew Brain --- augur/tasks/init/celery_app.py | 21 ++++++++++++++++++++- augur/tasks/init/celery_worker.py | 4 ++++ 2 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 augur/tasks/init/celery_worker.py diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 1a07f1cae4..edea20add1 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -1,5 +1,5 @@ """Defines the Celery app.""" -from celery.signals import worker_process_init, worker_process_shutdown +from celery.signals import worker_process_init, worker_process_shutdown, eventlet_pool_started, eventlet_pool_preshutdown, eventlet_pool_postshutdown import logging from typing import List, Dict import os @@ -156,3 +156,22 @@ def shutdown_worker(**kwargs): logger.info('Closing database connectionn for worker') engine.dispose() + +@eventlet_pool_started.connect +def init_eventlet_worker(**kwargs): + + global engine + + from augur.application.db.engine import DatabaseEngine + + engine = DatabaseEngine().engine + logger.info(f"Creating database engine for worker. Engine: {id(engine)}") + +@eventlet_pool_postshutdown.connect +def shutdown_eventlet_worker(**kwargs): + global engine + if engine: + logger.info(f'Closing database connectionn for worker. 
Engine {id(engine)}') + engine.dispose() + + diff --git a/augur/tasks/init/celery_worker.py b/augur/tasks/init/celery_worker.py new file mode 100644 index 0000000000..c9a76569a7 --- /dev/null +++ b/augur/tasks/init/celery_worker.py @@ -0,0 +1,4 @@ +from celery.signals import worker_process_init, worker_process_shutdown + +print("Celery worker") + From ba6d24bd5eebf08164d5c8c417107958f38a1c41 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 12:58:51 -0600 Subject: [PATCH 071/134] Improve database connections' Signed-off-by: Andrew Brain --- augur/tasks/data_analysis/__init__.py | 3 +- .../data_analysis/clustering_worker/tasks.py | 4 +- .../contributor_breadth_worker.py | 8 +- .../data_analysis/discourse_analysis/tasks.py | 7 +- .../data_analysis/insight_worker/tasks.py | 84 ++++++++++--------- .../data_analysis/message_insights/tasks.py | 19 ++--- .../pull_request_analysis_worker/tasks.py | 26 +++--- augur/tasks/db/refresh_materialized_views.py | 4 +- augur/tasks/git/dependency_tasks/tasks.py | 4 +- augur/tasks/git/facade_tasks.py | 56 ++++++++++++- .../facade_worker/facade01config.py | 4 +- augur/tasks/github/contributors/tasks.py | 4 +- augur/tasks/github/detect_move/tasks.py | 5 +- augur/tasks/github/events/tasks.py | 8 +- augur/tasks/github/facade_github/tasks.py | 5 +- augur/tasks/github/issues/tasks.py | 10 ++- augur/tasks/github/messages/tasks.py | 8 +- .../pull_requests/commits_model/tasks.py | 5 +- augur/tasks/github/pull_requests/core.py | 9 +- .../github/pull_requests/files_model/tasks.py | 5 +- augur/tasks/github/pull_requests/tasks.py | 12 ++- augur/tasks/github/releases/tasks.py | 4 +- augur/tasks/github/repo_info/tasks.py | 4 +- augur/tasks/start_tasks.py | 13 ++- 24 files changed, 219 insertions(+), 92 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 3324137668..5629644f98 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -14,10 +14,11 @@ def machine_learning_phase(): from augur.tasks.data_analysis.message_insights.tasks import message_insight_model from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model + from augur.tasks.init.celery_app import engine logger = logging.getLogger(machine_learning_phase.__name__) - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index 649fc81cfb..91f01912ef 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -19,7 +19,7 @@ from textblob import TextBlob from collections import Counter -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, RepoClusterMessage, RepoTopic, TopicWord @@ -34,6 +34,8 @@ @celery.task def clustering_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(clustering_model.__name__) logger.info(f"Starting clustering analysis for {repo_git}") diff --git a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py 
b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py index 183290edd4..1695dc935b 100644 --- a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py +++ b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py @@ -3,7 +3,7 @@ import pandas as pd import sqlalchemy as s -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.tasks.github.util.github_paginator import GithubPaginator from augur.application.db.models import ContributorRepo @@ -25,6 +25,8 @@ @celery.task def contributor_breadth_model() -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(contributor_breadth_model.__name__) tool_source = 'Contributor Breadth Worker' @@ -43,8 +45,8 @@ def contributor_breadth_model() -> None: WHERE gh_login IS NOT NULL """) - with DatabaseEngine(connection_pool_size=1) as engine: - current_cntrb_logins = json.loads(pd.read_sql(cntrb_login_query, engine, params={}).to_json(orient="records")) + + current_cntrb_logins = json.loads(pd.read_sql(cntrb_login_query, engine, params={}).to_json(orient="records")) ## We need a list of all contributors so we can iterate through them to gather events ## We need a list of event ids to avoid insertion of duplicate events. We ignore the event diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index b11b711e2d..c14d9146da 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -6,7 +6,7 @@ import nltk from collections import Counter -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, DiscourseInsight from augur.application.db.engine import DatabaseEngine @@ -34,6 +34,8 @@ @celery.task def discourse_analysis_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(discourse_analysis_model.__name__) tool_source = 'Discourse Worker' @@ -64,8 +66,7 @@ def discourse_analysis_model(repo_git: str) -> None: """) # result = db.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - with DatabaseEngine(connection_pool_size=1) as engine: - msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, engine, params={"repo_id": repo_id}) + msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, engine, params={"repo_id": repo_id}) msg_df_cur_repo = msg_df_cur_repo.sort_values(by=['thread_id']).reset_index(drop=True) logger.info(msg_df_cur_repo.head()) diff --git a/augur/tasks/data_analysis/insight_worker/tasks.py b/augur/tasks/data_analysis/insight_worker/tasks.py index 16f8cfaeba..aefc849744 100644 --- a/augur/tasks/data_analysis/insight_worker/tasks.py +++ b/augur/tasks/data_analysis/insight_worker/tasks.py @@ -13,7 +13,7 @@ from sklearn.ensemble import IsolationForest import warnings -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, ChaossMetricStatus, RepoInsight, RepoInsightsRecord @@ -26,6 +26,8 @@ 
@celery.task def insight_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(insight_model.__name__) refresh = True @@ -98,46 +100,45 @@ def insight_model(repo_git: str) -> None: return """ Deletion of old insights """ - with DatabaseEngine(connection_pool_size=1) as engine: - # Delete previous insights not in the anomaly_days param - min_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) - logger.info("MIN DATE: {}\n".format(min_date)) - logger.info("Deleting out of date records ...\n") - delete_record_SQL = s.sql.text(""" - DELETE - FROM - repo_insights_records - WHERE - repo_id = :repo_id + # Delete previous insights not in the anomaly_days param + min_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) + logger.info("MIN DATE: {}\n".format(min_date)) + logger.info("Deleting out of date records ...\n") + delete_record_SQL = s.sql.text(""" + DELETE + FROM + repo_insights_records + WHERE + repo_id = :repo_id + AND ri_date < :min_date + """) + result = engine.execute(delete_record_SQL, repo_id=repo_id, min_date=min_date) + + logger.info("Deleting out of date data points ...\n") + delete_points_SQL = s.sql.text(""" + DELETE + FROM + repo_insights + USING ( + SELECT ri_metric, ri_field + FROM ( + SELECT * + FROM repo_insights + WHERE ri_fresh = TRUE + AND repo_id = :repo_id AND ri_date < :min_date - """) - result = engine.execute(delete_record_SQL, repo_id=repo_id, min_date=min_date) + ) old_insights + ) to_delete + WHERE repo_insights.ri_metric = to_delete.ri_metric + AND repo_insights.ri_field = to_delete.ri_field + """) + result = engine.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) + + # get table values to check for dupes later on - logger.info("Deleting out of date data points ...\n") - delete_points_SQL = s.sql.text(""" - DELETE - FROM - repo_insights - USING ( - SELECT ri_metric, ri_field - FROM ( - SELECT * - FROM repo_insights - WHERE ri_fresh = TRUE - AND repo_id = :repo_id - AND ri_date < :min_date - ) old_insights - ) to_delete - WHERE repo_insights.ri_metric = to_delete.ri_metric - AND repo_insights.ri_field = to_delete.ri_field - """) - result = engine.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - - # get table values to check for dupes later on - - - table_values_sql = s.sql.text("""SELECT * FROM repo_insights_records WHERE repo_id={}""".format(repo_id)) - insight_table_values = pd.read_sql(table_values_sql, engine, params={}) + + table_values_sql = s.sql.text("""SELECT * FROM repo_insights_records WHERE repo_id={}""".format(repo_id)) + insight_table_values = pd.read_sql(table_values_sql, engine, params={}) to_model_columns = df.columns[0:len(metrics) + 1] @@ -308,6 +309,8 @@ def confidence_interval_insights(logger): """ Anomaly detection method based on confidence intervals """ + from augur.tasks.init.celery_app import engine + # Update table of endpoints before we query them all logger.info("Discovering insights for task with entry info: {}".format(entry_info)) @@ -699,6 +702,9 @@ def confidence_interval(data, logger, timeperiod='week', confidence=.95, ): return m, m - h, m + h def update_metrics(api_host, api_port, tool_source, tool_version, logger): + + from augur.tasks.init.celery_app import engine + logger.info("Preparing to update metrics ...\n\n" + "Hitting endpoint: http://{}:{}/api/unstable/metrics/status ...\n".format( api_host, api_port)) diff --git a/augur/tasks/data_analysis/message_insights/tasks.py 
b/augur/tasks/data_analysis/message_insights/tasks.py index 6a34944473..5377be36db 100644 --- a/augur/tasks/data_analysis/message_insights/tasks.py +++ b/augur/tasks/data_analysis/message_insights/tasks.py @@ -11,7 +11,7 @@ from augur.tasks.data_analysis.message_insights.message_novelty import novelty_analysis from augur.tasks.data_analysis.message_insights.message_sentiment import get_senti_score -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, MessageAnalysis, MessageAnalysisSummary @@ -25,6 +25,8 @@ @celery.task def message_insight_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(message_insight_model.__name__) full_train = True @@ -53,8 +55,7 @@ def message_insight_model(repo_git: str) -> None: repo_exists_SQL = s.sql.text(""" SELECT exists (SELECT 1 FROM augur_data.message_analysis_summary WHERE repo_id = :repo_id LIMIT 1)""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_rep = pd.read_sql_query(repo_exists_SQL, engine, params={'repo_id': repo_id}) + df_rep = pd.read_sql_query(repo_exists_SQL, engine, params={'repo_id': repo_id}) #full_train = not(df_rep['exists'].iloc[0]) logger.info(f'Full Train: {full_train}') @@ -79,8 +80,7 @@ def message_insight_model(repo_git: str) -> None: where message.repo_id = :repo_id """) - with DatabaseEngine(connection_pool_size=1) as engine: - df_past = pd.read_sql_query(past_SQL, engine, params={'repo_id': repo_id}) + df_past = pd.read_sql_query(past_SQL, engine, params={'repo_id': repo_id}) df_past['msg_timestamp'] = pd.to_datetime(df_past['msg_timestamp']) df_past = df_past.sort_values(by='msg_timestamp') @@ -120,8 +120,7 @@ def message_insight_model(repo_git: str) -> None: left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where message.repo_id = :repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_message = pd.read_sql_query(join_SQL, engine, params={'repo_id': repo_id, 'begin_date': begin_date}) + df_message = pd.read_sql_query(join_SQL, engine, params={'repo_id': repo_id, 'begin_date': begin_date}) logger.info(f'Messages dataframe dim: {df_message.shape}') logger.info(f'Value 1: {df_message.shape[0]}') @@ -156,8 +155,7 @@ def message_insight_model(repo_git: str) -> None: left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where issue_message_ref.repo_id = :repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_past = pd.read_sql_query(merge_SQL, engine, params={'repo_id': repo_id}) + df_past = pd.read_sql_query(merge_SQL, engine, params={'repo_id': repo_id}) df_past = df_past.loc[df_past['novelty_flag'] == 0] rec_errors = df_past['reconstruction_error'].tolist() threshold = threshold_otsu(np.array(rec_errors)) @@ -345,8 +343,7 @@ def message_insight_model(repo_git: str) -> None: FROM message_analysis_summary WHERE repo_id=:repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_past = pd.read_sql_query(message_analysis_query, engine, params={'repo_id': repo_id}) + df_past = pd.read_sql_query(message_analysis_query, engine, params={'repo_id': repo_id}) # df_past = get_table_values(cols=['period', 'positive_ratio', 'negative_ratio', 'novel_count'], # tables=['message_analysis_summary'], diff --git 
a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py index d6cd816782..76b0514964 100644 --- a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py +++ b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py @@ -8,7 +8,7 @@ from augur.tasks.data_analysis.message_insights.message_sentiment import get_senti_score -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, PullRequestAnalysis @@ -24,6 +24,8 @@ @celery.task def pull_request_analysis_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(pull_request_analysis_model.__name__) tool_source = 'Pull Request Analysis Worker' @@ -65,8 +67,7 @@ def pull_request_analysis_model(repo_git: str) -> None: and pr_src_state like 'open' """) - with DatabaseEngine(connection_pool_size=1) as engine: - df_pr = pd.read_sql_query(pr_SQL, engine, params={'begin_date': begin_date, 'repo_id': repo_id}) + df_pr = pd.read_sql_query(pr_SQL, engine, params={'begin_date': begin_date, 'repo_id': repo_id}) logger.info(f'PR Dataframe dim: {df_pr.shape}\n') @@ -98,16 +99,14 @@ def pull_request_analysis_model(repo_git: str) -> None: left outer join augur_data.issue_message_ref on message.msg_id = issue_message_ref.msg_id left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where issue_message_ref.repo_id = :repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_message = pd.read_sql_query(messages_SQL, engine, params={'repo_id': repo_id}) + df_message = pd.read_sql_query(messages_SQL, engine, params={'repo_id': repo_id}) logger.info(f'Mapping messages to PR, find comment & participants counts') # Map PR to its corresponding messages - with DatabaseEngine(connection_pool_size=1) as engine: - pr_ref_sql = s.sql.text("select * from augur_data.pull_request_message_ref") - df_pr_ref = pd.read_sql_query(pr_ref_sql, engine) + pr_ref_sql = s.sql.text("select * from augur_data.pull_request_message_ref") + df_pr_ref = pd.read_sql_query(pr_ref_sql, engine) df_merge = pd.merge(df_pr, df_pr_ref, on='pull_request_id', how='left') df_merge = pd.merge(df_merge, df_message, on='msg_id', how='left') df_merge = df_merge.dropna(subset=['msg_id'], axis=0) @@ -156,12 +155,11 @@ def pull_request_analysis_model(repo_git: str) -> None: logger.info(f'Fetching repo statistics') # Get repo info - with DatabaseEngine(connection_pool_size=1) as engine: - repo_sql = s.sql.text(""" - SELECT repo_id, pull_requests_merged, pull_request_count,watchers_count, last_updated FROM - augur_data.repo_info where repo_id = :repo_id - """) - df_repo = pd.read_sql_query(repo_sql, engine, params={'repo_id': repo_id}) + repo_sql = s.sql.text(""" + SELECT repo_id, pull_requests_merged, pull_request_count,watchers_count, last_updated FROM + augur_data.repo_info where repo_id = :repo_id + """) + df_repo = pd.read_sql_query(repo_sql, engine, params={'repo_id': repo_id}) df_repo = df_repo.loc[df_repo.groupby('repo_id').last_updated.idxmax(), :] df_repo = df_repo.drop(['last_updated'], axis=1) diff --git a/augur/tasks/db/refresh_materialized_views.py b/augur/tasks/db/refresh_materialized_views.py index 4c15434da2..53b29ddbd2 100644 --- a/augur/tasks/db/refresh_materialized_views.py +++ 
b/augur/tasks/db/refresh_materialized_views.py @@ -4,13 +4,15 @@ from celery import signature from celery import group, chain, chord, signature -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession @celery.task def refresh_materialized_views(): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(refresh_materialized_views.__name__) refresh_view_query = s.sql.text(""" diff --git a/augur/tasks/git/dependency_tasks/tasks.py b/augur/tasks/git/dependency_tasks/tasks.py index 611070f380..7e151a1b26 100644 --- a/augur/tasks/git/dependency_tasks/tasks.py +++ b/augur/tasks/git/dependency_tasks/tasks.py @@ -10,9 +10,11 @@ def process_dependency_metrics(repo_git): #raise NotImplementedError + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_dependency_metrics.__name__) - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: query = session.query(Repo).filter(Repo.repo_git == repo_git) repo = execute_session_query(query,'one') diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 5597f9efb8..c7e27668b8 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -29,7 +29,7 @@ from augur.tasks.util.worker_util import create_grouped_task_load -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db import data_parse @@ -46,6 +46,9 @@ #if it does. @celery.task def facade_error_handler(request,exc,traceback): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_error_handler.__name__) logger.error(f"Task {request.id} raised exception: {exc}! 
\n {traceback}") @@ -64,6 +67,9 @@ def facade_error_handler(request,exc,traceback): #Predefine facade collection with tasks @celery.task def facade_analysis_init_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_analysis_init_facade_task.__name__) with FacadeSession(logger) as session: session.update_status('Running analysis') @@ -71,8 +77,10 @@ def facade_analysis_init_facade_task(): @celery.task def grab_comitters(repo_id,platform="github"): - logger = logging.getLogger(grab_comitters.__name__) + from augur.tasks.init.celery_app import engine + + logger = logging.getLogger(grab_comitters.__name__) try: grab_committer_list(GithubTaskSession(logger, engine), repo_id,platform) @@ -82,6 +90,9 @@ def grab_comitters(repo_id,platform="github"): @celery.task def trim_commits_facade_task(repo_id): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(trim_commits_facade_task.__name__) session = FacadeSession(logger) @@ -129,6 +140,9 @@ def update_analysis_log(repos_id,status): @celery.task def trim_commits_post_analysis_facade_task(repo_id): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) @@ -214,6 +228,9 @@ def update_analysis_log(repos_id,status): @celery.task def facade_analysis_end_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_analysis_end_facade_task.__name__) FacadeSession(logger).log_activity('Info','Running analysis (complete)') @@ -221,6 +238,9 @@ def facade_analysis_end_facade_task(): @celery.task def facade_start_contrib_analysis_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_start_contrib_analysis_task.__name__) session = FacadeSession(logger) session.update_status('Updating Contributors') @@ -233,6 +253,8 @@ def analyze_commits_in_parallel(repo_id, multithreaded: bool)-> None: """Take a large list of commit data to analyze and store in the database. Meant to be run in parallel with other instances of this task. """ + from augur.tasks.init.celery_app import engine + #create new session for celery thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) # TODO: Is this session ever closed? @@ -320,6 +342,9 @@ def analyze_commits_in_parallel(repo_id, multithreaded: bool)-> None: @celery.task def nuke_affiliations_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(nuke_affiliations_facade_task.__name__) # TODO: Is this session ever closed? 
session = FacadeSession(logger) @@ -328,12 +353,18 @@ def nuke_affiliations_facade_task(): @celery.task def fill_empty_affiliations_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(fill_empty_affiliations_facade_task.__name__) with FacadeSession(logger) as session: fill_empty_affiliations(session) @celery.task def invalidate_caches_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(invalidate_caches_facade_task.__name__) with FacadeSession(logger) as session: @@ -341,6 +372,9 @@ def invalidate_caches_facade_task(): @celery.task def rebuild_unknown_affiliation_and_web_caches_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(rebuild_unknown_affiliation_and_web_caches_facade_task.__name__) with FacadeSession(logger) as session: @@ -348,6 +382,9 @@ def rebuild_unknown_affiliation_and_web_caches_facade_task(): @celery.task def force_repo_analysis_facade_task(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(force_repo_analysis_facade_task.__name__) with FacadeSession(logger) as session: @@ -355,6 +392,9 @@ def force_repo_analysis_facade_task(repo_git): @celery.task def git_repo_cleanup_facade_task(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(git_repo_cleanup_facade_task.__name__) with FacadeSession(logger) as session: @@ -362,6 +402,9 @@ def git_repo_cleanup_facade_task(repo_git): @celery.task def git_repo_initialize_facade_task(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(git_repo_initialize_facade_task.__name__) with FacadeSession(logger) as session: @@ -369,6 +412,9 @@ def git_repo_initialize_facade_task(repo_git): @celery.task def check_for_repo_updates_facade_task(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(check_for_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: @@ -376,6 +422,9 @@ def check_for_repo_updates_facade_task(repo_git): @celery.task def force_repo_updates_facade_task(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(force_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: @@ -383,6 +432,9 @@ def force_repo_updates_facade_task(repo_git): @celery.task def git_repo_updates_facade_task(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(git_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py index 739ce885b5..0d0942482b 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py @@ -111,9 +111,11 @@ class FacadeSession(GithubTaskSession): create_xlsx_summary_files (int): toggles whether to create excel summary files """ def __init__(self,logger: Logger): + + from augur.tasks.init.celery_app import engine #self.cfg = FacadeConfig(logger) self.repos_processed = 0 - super().__init__(logger=logger) + super().__init__(logger=logger, engine=engine) # Figure out what we need to do worker_options = AugurConfig(logger, self).get_section("Facade") diff --git a/augur/tasks/github/contributors/tasks.py b/augur/tasks/github/contributors/tasks.py index 38967edc4f..0304abca45 100644 --- 
a/augur/tasks/github/contributors/tasks.py +++ b/augur/tasks/github/contributors/tasks.py @@ -2,7 +2,7 @@ import logging -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -14,6 +14,8 @@ @celery.task def process_contributors(): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_contributors.__name__) tool_source = "Contributors task" diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py index 5f96b22b63..a61f25615a 100644 --- a/augur/tasks/github/detect_move/tasks.py +++ b/augur/tasks/github/detect_move/tasks.py @@ -1,12 +1,15 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.detect_move.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query @celery.task() def detect_github_repo_move(repo_git : str) -> None: + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(detect_github_repo_move.__name__) logger.info(f"Starting repo_move operation with {repo_git}") diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 291dc93d90..eced45325a 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -2,7 +2,7 @@ import logging import traceback -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -17,6 +17,8 @@ @celery.task() def collect_events(repo_git: str): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_events.__name__) with DatabaseSession(logger, engine) as session: @@ -47,6 +49,8 @@ def collect_events(repo_git: str): def retrieve_all_event_data(repo_git: str, logger): + from augur.tasks.init.celery_app import engine + owner, repo = get_owner_repo(repo_git) logger.info(f"Collecting Github events for {owner}/{repo}") @@ -78,6 +82,8 @@ def retrieve_all_event_data(repo_git: str, logger): return all_data def process_events(events, task_name, repo_id, logger): + + from augur.tasks.init.celery_app import engine tool_source = "Github events task" tool_version = "2.0" diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 41c0249786..24c09b9c19 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -2,7 +2,7 @@ import logging -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -225,6 +225,9 @@ def link_commits_to_contributor(session,contributorQueue): # Update the contributors table from the data facade has gathered. 
@celery.task def insert_facade_contributors(repo_id): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(insert_facade_contributors.__name__) with GithubTaskSession(logger, engine) as session: diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index 6553a1a9a2..1fe9778976 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -6,7 +6,7 @@ from sqlalchemy.exc import IntegrityError -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -21,9 +21,11 @@ @celery.task() def collect_issues(repo_git : str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_issues.__name__) - with DatabaseSession(logger, engine) as session: + with GithubTaskSession(logger, engine) as session: try: @@ -48,6 +50,8 @@ def collect_issues(repo_git : str) -> None: def retrieve_all_issue_data(repo_git, logger) -> None: + from augur.tasks.init.celery_app import engine + owner, repo = get_owner_repo(repo_git) logger.info(f"Collecting issues for {owner}/{repo}") @@ -85,6 +89,8 @@ def retrieve_all_issue_data(repo_git, logger) -> None: return all_data def process_issues(issues, task_name, repo_id, logger) -> None: + + from augur.tasks.init.celery_app import engine # get repo_id or have it passed tool_source = "Issue Task" diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index 5180c1982b..022cc6361c 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -3,7 +3,7 @@ import traceback -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -21,6 +21,8 @@ @celery.task() def collect_github_messages(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_github_messages.__name__) with DatabaseSession(logger, engine) as session: @@ -46,6 +48,8 @@ def collect_github_messages(repo_git: str) -> None: def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: + from augur.tasks.init.celery_app import engine + owner, repo = get_owner_repo(repo_git) # url to get issue and pull request comments @@ -86,6 +90,8 @@ def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: def process_messages(messages, task_name, repo_id, logger): + from augur.tasks.init.celery_app import engine + tool_source = "Pr comment task" tool_version = "2.0" data_source = "Github API" diff --git a/augur/tasks/github/pull_requests/commits_model/tasks.py b/augur/tasks/github/pull_requests/commits_model/tasks.py index 06e2e9e854..cd969ae236 100644 --- a/augur/tasks/github/pull_requests/commits_model/tasks.py +++ b/augur/tasks/github/pull_requests/commits_model/tasks.py @@ -2,12 +2,15 @@ import traceback from augur.application.db.session import DatabaseSession from augur.tasks.github.pull_requests.commits_model.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app 
import celery_app as celery from augur.application.db.util import execute_session_query @celery.task() def process_pull_request_commits(repo_git: str) -> None: + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_pull_request_commits.__name__) with DatabaseSession(logger, engine) as session: diff --git a/augur/tasks/github/pull_requests/core.py b/augur/tasks/github/pull_requests/core.py index 9cd8008993..03a6b80438 100644 --- a/augur/tasks/github/pull_requests/core.py +++ b/augur/tasks/github/pull_requests/core.py @@ -5,7 +5,6 @@ from augur.application.db.data_parse import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.util.util import add_key_value_pair_to_dicts from augur.tasks.util.worker_util import remove_duplicate_dicts @@ -224,6 +223,8 @@ def insert_pr_labels(labels: List[dict], logger: logging.Logger) -> None: labels: list of labels to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # we are using pr_src_id and pull_request_id to determine if the label is already in the database. @@ -241,6 +242,8 @@ def insert_pr_assignees(assignees: List[dict], logger: logging.Logger) -> None: assignees: list of assignees to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # we are using pr_assignee_src_id and pull_request_id to determine if the label is already in the database. @@ -258,6 +261,8 @@ def insert_pr_reviewers(reviewers: List[dict], logger: logging.Logger) -> None: reviewers: list of reviewers to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # we are using pr_src_id and pull_request_id to determine if the label is already in the database. 
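The recurring comment above refers to Augur's natural-key bulk insert: callers hand the session a list of dicts, a model, and the columns that identify a row (here `pr_src_id` and `pull_request_id`), and existing rows are matched on those columns instead of the primary key so that re-collection does not create duplicates. The helper itself (`session.insert_data`, seen later in this series as `session.insert_data(pr_file_rows, PullRequestFile, pr_file_natural_keys)`) is defined elsewhere in Augur; the sketch below only illustrates the general idea with PostgreSQL's `ON CONFLICT`, and it assumes a unique constraint exists on the natural-key columns.

```
# Sketch only: upsert a list of dicts keyed on "natural key" columns.
from sqlalchemy.dialects.postgresql import insert

def upsert_by_natural_key(session, model, rows, natural_keys):
    if not rows:
        return
    stmt = insert(model).values(rows)
    # Overwrite the non-key, non-primary-key columns when the natural key already exists.
    update_cols = {c.name: stmt.excluded[c.name]
                   for c in model.__table__.columns
                   if c.name not in natural_keys and not c.primary_key}
    if update_cols:
        stmt = stmt.on_conflict_do_update(index_elements=natural_keys, set_=update_cols)
    else:
        stmt = stmt.on_conflict_do_nothing(index_elements=natural_keys)
    session.execute(stmt)
    session.commit()
```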
@@ -275,6 +280,8 @@ def insert_pr_metadata(metadata: List[dict], logger: logging.Logger) -> None: metadata: list of metadata to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # inserting pr metadata diff --git a/augur/tasks/github/pull_requests/files_model/tasks.py b/augur/tasks/github/pull_requests/files_model/tasks.py index 554959d147..f0b0a7f142 100644 --- a/augur/tasks/github/pull_requests/files_model/tasks.py +++ b/augur/tasks/github/pull_requests/files_model/tasks.py @@ -2,11 +2,14 @@ import traceback from augur.application.db.session import DatabaseSession from augur.tasks.github.pull_requests.files_model.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query @celery.task() def process_pull_request_files(repo_git: str) -> None: + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_pull_request_files.__name__) with DatabaseSession(logger, engine) as session: diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index e449f4b971..830bbf89f5 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -3,7 +3,7 @@ import traceback from augur.tasks.github.pull_requests.core import extract_data_from_pr_list -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -20,6 +20,8 @@ @celery.task() def collect_pull_requests(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_pull_requests.__name__) logger.info(f"Celery engine: {engine}") @@ -47,6 +49,8 @@ def collect_pull_requests(repo_git: str) -> None: # TODO: Fix column names in pull request labels table def retrieve_all_pr_data(repo_git: str, logger) -> None: + from augur.tasks.init.celery_app import engine + owner, repo = get_owner_repo(repo_git) # define GithubTaskSession to handle insertions, and store oauth keys @@ -82,6 +86,8 @@ def retrieve_all_pr_data(repo_git: str, logger) -> None: def process_pull_requests(pull_requests, task_name, repo_id, logger): + from augur.tasks.init.celery_app import engine + tool_source = "Pr Task" tool_version = "2.0" data_source = "Github API" @@ -216,6 +222,8 @@ def process_pull_requests(pull_requests, task_name, repo_id, logger): @celery.task def pull_request_review_comments(repo_git: str) -> None: + + from augur.tasks.init.celery_app import engine owner, repo = get_owner_repo(repo_git) @@ -310,6 +318,8 @@ def pull_request_review_comments(repo_git: str) -> None: @celery.task def pull_request_reviews(repo_git: str, pr_number_list: [int]) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(pull_request_reviews.__name__) owner, repo = get_owner_repo(repo_git) diff --git a/augur/tasks/github/releases/tasks.py b/augur/tasks/github/releases/tasks.py index 427a3bbd9f..a6d0d81454 100644 --- a/augur/tasks/github/releases/tasks.py +++ b/augur/tasks/github/releases/tasks.py @@ -1,12 +1,14 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from 
augur.tasks.github.releases.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query import traceback @celery.task def collect_releases(repo_git): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_releases.__name__) with GithubTaskSession(logger, engine) as session: diff --git a/augur/tasks/github/repo_info/tasks.py b/augur/tasks/github/repo_info/tasks.py index aef3a0f57c..1a37386f72 100644 --- a/augur/tasks/github/repo_info/tasks.py +++ b/augur/tasks/github/repo_info/tasks.py @@ -1,13 +1,15 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.application.db.session import DatabaseSession from augur.tasks.github.repo_info.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query import traceback @celery.task() def collect_repo_info(repo_git: str): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_repo_info.__name__) with GithubTaskSession(logger, engine) as session: diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 71c320b674..8d130ff61c 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -25,7 +25,7 @@ from augur.tasks.git.facade_tasks import * from augur.tasks.db.refresh_materialized_views import * # from augur.tasks.data_analysis import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from celery.result import allow_join_result from augur.application.logs import AugurLogger from augur.application.config import AugurConfig @@ -59,6 +59,9 @@ def collection_task_wrapper(self,*args,**kwargs): @celery.task def task_success(repo_git): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(task_success.__name__) logger.info(f"Repo '{repo_git}' succeeded") @@ -79,6 +82,9 @@ def task_success(repo_git): @celery.task def task_failed(request,exc,traceback): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(task_failed.__name__) with DatabaseSession(logger,engine) as session: @@ -267,7 +273,10 @@ def start_task(): @celery.task -def augur_collection_monitor(): +def augur_collection_monitor(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(augur_collection_monitor.__name__) logger.info("Checking for repos to collect") From 799ed88d2fad1709605f8f15d6c0f7779349bff7 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 13:11:50 -0600 Subject: [PATCH 072/134] Improve connections Signed-off-by: Andrew Brain --- augur/application/db/session.py | 3 - augur/tasks/git/dependency_tasks/core.py | 2 +- .../facade_worker/facade00mainprogram.py | 1 - .../pull_requests/commits_model/core.py | 3 +- .../github/pull_requests/files_model/core.py | 131 +++++++++--------- .../github/util/github_random_key_auth.py | 1 - 6 files changed, 69 insertions(+), 72 deletions(-) diff --git a/augur/application/db/session.py b/augur/application/db/session.py index 371d0cb462..edaa16a70a 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -53,8 +53,6 @@ class DatabaseSession(Session): def __init__(self, logger, engine=None): - logger.info(f"Engine passed to session: 
{engine}") - self.logger = logger self.engine = engine self.engine_created = False @@ -65,7 +63,6 @@ def __init__(self, logger, engine=None): self.engine_created = True self.engine = DatabaseEngine().engine - logger.info("ENGINE CREATE: Created engine in session") super().__init__(self.engine) diff --git a/augur/tasks/git/dependency_tasks/core.py b/augur/tasks/git/dependency_tasks/core.py index 0be1530663..8bd1be13b1 100644 --- a/augur/tasks/git/dependency_tasks/core.py +++ b/augur/tasks/git/dependency_tasks/core.py @@ -6,7 +6,6 @@ from augur.application.db.data_parse import * from augur.application.db.models import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.application.config import AugurConfig from augur.application.db.util import execute_session_query from augur.tasks.git.dependency_tasks.dependency_util import dependency_calculator as dep_calc @@ -16,6 +15,7 @@ def generate_deps_data(session, repo_id, path): :param repo_id: Repository ID :param path: Absolute path of the Repostiory """ + session.logger.info('Searching for deps in repo') session.logger.info(f'Repo ID: {repo_id}, Path: {path}') diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py b/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py index 2ac69e719c..a6d0e6b386 100755 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py @@ -42,7 +42,6 @@ from augur.tasks.github.facade_github.contributor_interfaceable.contributor_interface import * from augur.tasks.github.util.github_task_session import GithubTaskSession -from augur.tasks.init.celery_app import engine from logging import Logger from sqlalchemy.sql.elements import TextClause diff --git a/augur/tasks/github/pull_requests/commits_model/core.py b/augur/tasks/github/pull_requests/commits_model/core.py index cc9b277889..6e7d7bb22c 100644 --- a/augur/tasks/github/pull_requests/commits_model/core.py +++ b/augur/tasks/github/pull_requests/commits_model/core.py @@ -4,7 +4,6 @@ import sqlalchemy as s from augur.application.db.data_parse import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.application.db.models import * @@ -13,6 +12,8 @@ def pull_request_commits_model(repo_id,logger): + + from augur.tasks.init.celery_app import engine # query existing PRs and the respective url we will append the commits url to pr_url_sql = s.sql.text(""" diff --git a/augur/tasks/github/pull_requests/files_model/core.py b/augur/tasks/github/pull_requests/files_model/core.py index e8c1cdb3f9..c2d810f911 100644 --- a/augur/tasks/github/pull_requests/files_model/core.py +++ b/augur/tasks/github/pull_requests/files_model/core.py @@ -4,7 +4,6 @@ import sqlalchemy as s from augur.application.db.data_parse import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.gh_graphql_entities import GraphQlPageCollection, hit_api_graphql @@ -13,84 +12,86 @@ from augur.application.db.util import execute_session_query def 
pull_request_files_model(repo_id,logger): + + from augur.tasks.init.celery_app import engine - # query existing PRs and the respective url we will append the commits url to - pr_number_sql = s.sql.text(""" - SELECT DISTINCT pr_src_number as pr_src_number, pull_requests.pull_request_id - FROM pull_requests--, pull_request_meta - WHERE repo_id = :repo_id - """).bindparams(repo_id=repo_id) - pr_numbers = [] - #pd.read_sql(pr_number_sql, self.db, params={}) + # query existing PRs and the respective url we will append the commits url to + pr_number_sql = s.sql.text(""" + SELECT DISTINCT pr_src_number as pr_src_number, pull_requests.pull_request_id + FROM pull_requests--, pull_request_meta + WHERE repo_id = :repo_id + """).bindparams(repo_id=repo_id) + pr_numbers = [] + #pd.read_sql(pr_number_sql, self.db, params={}) - # TODO: Is this session ever closed? - session = GithubTaskSession(logger, engine) - result = session.execute_sql(pr_number_sql).fetchall() - pr_numbers = [dict(zip(row.keys(), row)) for row in result] + # TODO: Is this session ever closed? + session = GithubTaskSession(logger, engine) + result = session.execute_sql(pr_number_sql).fetchall() + pr_numbers = [dict(zip(row.keys(), row)) for row in result] - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') - - owner, name = get_owner_repo(repo.repo_git) + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - pr_file_rows = [] - logger.info(f"Getting pull request files for repo: {repo.repo_git}") - for index,pr_info in enumerate(pr_numbers): + owner, name = get_owner_repo(repo.repo_git) - logger.info(f'Querying files for pull request #{index + 1} of {len(pr_numbers)}') - - query = """ + pr_file_rows = [] + logger.info(f"Getting pull request files for repo: {repo.repo_git}") + for index,pr_info in enumerate(pr_numbers): - query($repo: String!, $owner: String!,$pr_number: Int!, $numRecords: Int!, $cursor: String) { - repository(name: $repo, owner: $owner) { - pullRequest(number: $pr_number) { - files ( first: $numRecords, after: $cursor) - { - edges { - node { - additions - deletions - path - } - } - totalCount - pageInfo { - hasNextPage - endCursor + logger.info(f'Querying files for pull request #{index + 1} of {len(pr_numbers)}') + + query = """ + + query($repo: String!, $owner: String!,$pr_number: Int!, $numRecords: Int!, $cursor: String) { + repository(name: $repo, owner: $owner) { + pullRequest(number: $pr_number) { + files ( first: $numRecords, after: $cursor) + { + edges { + node { + additions + deletions + path } } + totalCount + pageInfo { + hasNextPage + endCursor + } } } } - """ - - values = ("repository","pullRequest","files") - params = { - 'owner' : owner, - 'repo' : name, - 'pr_number' : pr_info['pr_src_number'], - 'values' : values } + """ + + values = ("repository","pullRequest","files") + params = { + 'owner' : owner, + 'repo' : name, + 'pr_number' : pr_info['pr_src_number'], + 'values' : values + } - try: - file_collection = GraphQlPageCollection(query, session.oauths, session.logger,bind=params) + try: + file_collection = GraphQlPageCollection(query, session.oauths, session.logger,bind=params) - pr_file_rows += [{ - 'pull_request_id': pr_info['pull_request_id'], - 'pr_file_additions': pr_file['additions'] if 'additions' in pr_file else None, - 'pr_file_deletions': pr_file['deletions'] if 'deletions' in pr_file else None, - 'pr_file_path': pr_file['path'], - 'data_source': 'GitHub API', - 'repo_id': repo_id, - } 
for pr_file in file_collection if pr_file and 'path' in pr_file] - except Exception as e: - logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") - logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) + pr_file_rows += [{ + 'pull_request_id': pr_info['pull_request_id'], + 'pr_file_additions': pr_file['additions'] if 'additions' in pr_file else None, + 'pr_file_deletions': pr_file['deletions'] if 'deletions' in pr_file else None, + 'pr_file_path': pr_file['path'], + 'data_source': 'GitHub API', + 'repo_id': repo_id, + } for pr_file in file_collection if pr_file and 'path' in pr_file] + except Exception as e: + logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") + logger.error( + ''.join(traceback.format_exception(None, e, e.__traceback__))) - if len(pr_file_rows) > 0: - #Execute a bulk upsert with sqlalchemy - pr_file_natural_keys = ["pull_request_id", "repo_id", "pr_file_path"] - session.insert_data(pr_file_rows, PullRequestFile, pr_file_natural_keys) + if len(pr_file_rows) > 0: + #Execute a bulk upsert with sqlalchemy + pr_file_natural_keys = ["pull_request_id", "repo_id", "pr_file_path"] + session.insert_data(pr_file_rows, PullRequestFile, pr_file_natural_keys) diff --git a/augur/tasks/github/util/github_random_key_auth.py b/augur/tasks/github/util/github_random_key_auth.py index 3a1e8bec00..56933ab633 100644 --- a/augur/tasks/github/util/github_random_key_auth.py +++ b/augur/tasks/github/util/github_random_key_auth.py @@ -3,7 +3,6 @@ from augur.tasks.util.random_key_auth import RandomKeyAuth from augur.tasks.github.util.github_api_key_handler import GithubApiKeyHandler from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine class GithubRandomKeyAuth(RandomKeyAuth): From 67687a6c7eaecba21eb925d88bcef45c7170d14d Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 30 Jan 2023 13:33:18 -0600 Subject: [PATCH 073/134] syntax for machine_learning_phase Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/__init__.py | 3 +-- augur/tasks/git/facade_tasks.py | 2 +- augur/tasks/init/__init__.py | 2 +- augur/tasks/start_tasks.py | 30 ++++++++++++++++++++------- 4 files changed, 26 insertions(+), 11 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 5629644f98..3324137668 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -14,11 +14,10 @@ def machine_learning_phase(): from augur.tasks.data_analysis.message_insights.tasks import message_insight_model from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model - from augur.tasks.init.celery_app import engine logger = logging.getLogger(machine_learning_phase.__name__) - with DatabaseSession(logger, engine) as session: + with DatabaseSession(logger) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index f24fccbbad..1120e7ed05 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -612,4 +612,4 @@ def generate_non_repo_domain_facade_tasks(logger): if not limited_run or (limited_run and rebuild_caches): facade_sequence.append(rebuild_unknown_affiliation_and_web_caches_facade_task.si().on_error(facade_error_handler.s()))#rebuild_unknown_affiliation_and_web_caches(session.cfg) - return chain(*facade_sequence) \ No newline at end 
of file + return facade_sequence \ No newline at end of file diff --git a/augur/tasks/init/__init__.py b/augur/tasks/init/__init__.py index eb590a99ab..73a50f3435 100644 --- a/augur/tasks/init/__init__.py +++ b/augur/tasks/init/__init__.py @@ -25,6 +25,6 @@ def get_rabbitmq_conn_string(): with DatabaseSession(logger) as session: config = AugurConfig(logger, session) - rabbbitmq_conn_string = config.get_value("RabbitMQ", "connection_string") + rabbbitmq_conn_string = config.get_value("RabbitMQ", "connection_string") return rabbbitmq_conn_string diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 67a867e8ec..644649f4c6 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -123,7 +123,6 @@ def prelim_phase(repo_git): query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_obj = execute_session_query(query, 'one') - #TODO: if repo has moved mark it as pending. job = detect_github_repo_move.si(repo_obj.repo_git) @@ -171,9 +170,7 @@ def repo_collect_phase(repo_git): return repo_task_group -DEFINED_COLLECTION_PHASES = [prelim_phase, repo_collect_phase] -if os.environ.get('AUGUR_DOCKER_DEPLOY') != "1": - DEFINED_COLLECTION_PHASES.append(machine_learning_phase) +DEFINED_PHASES_PER_REPO = [prelim_phase, repo_collect_phase] class AugurTaskRoutine: @@ -255,8 +252,27 @@ def non_repo_domain_tasks(): logger.info("Executing non-repo domain tasks") - tasks = group( - generate_non_repo_domain_facade_tasks(logger) + enabled_phase_names = [] + with DatabaseSession(logger, engine) as session: + + max_repo_count = 500 + days = 30 + + config = AugurConfig(logger, session) + phase_options = config.get_section("Task_Routine") + + #Get list of enabled phases + enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] + + enabled_tasks = [] + + enabled_tasks.extend(generate_non_repo_domain_facade_tasks(logger)) + + if machine_learning_phase.__name__ in enabled_phase_names: + enabled_tasks.append(machine_learning_phase.si()) + + tasks = chain( + *enabled_tasks ) tasks.apply_async() @@ -283,7 +299,7 @@ def augur_collection_monitor(): #Get list of enabled phases enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] - enabled_phases = [phase for phase in DEFINED_COLLECTION_PHASES if phase.__name__ in enabled_phase_names] + enabled_phases = [phase for phase in DEFINED_PHASES_PER_REPO if phase.__name__ in enabled_phase_names] active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value).all()) From c23ddf53e4ea387822df9b5e989ab18dbab1a594 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 13:40:51 -0600 Subject: [PATCH 074/134] Fixes after merge Signed-off-by: Andrew Brain --- augur/tasks/data_analysis/__init__.py | 3 ++- augur/tasks/start_tasks.py | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 3324137668..5629644f98 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -14,10 +14,11 @@ def machine_learning_phase(): from augur.tasks.data_analysis.message_insights.tasks import message_insight_model from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model + from augur.tasks.init.celery_app import engine logger = logging.getLogger(machine_learning_phase.__name__) - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as 
session: query = session.query(Repo) repos = execute_session_query(query, 'all') diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 644649f4c6..e25032e1ef 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -248,6 +248,10 @@ def start_data_collection(self): @celery.task def non_repo_domain_tasks(): + + from augur.tasks.init.celery_app import engine + + logger = logging.getLogger(non_repo_domain_tasks.__name__) logger.info("Executing non-repo domain tasks") From ade8229509f937a85f0f148a52b1a2787c7a4aaf Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 14:14:44 -0600 Subject: [PATCH 075/134] Make DatabaseEngine class take all params create_engine does Signed-off-by: Andrew Brain --- augur/application/db/engine.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/augur/application/db/engine.py b/augur/application/db/engine.py index 9d2a2621fc..5cb01ee0c4 100644 --- a/augur/application/db/engine.py +++ b/augur/application/db/engine.py @@ -52,9 +52,14 @@ def get_database_string() -> str: class DatabaseEngine(): - def __init__(self, connection_pool_size=5): + def __init__(self, **kwargs): - self._engine = self.create_database_engine(connection_pool_size) + pool_size = kwargs.get("connection_pool_size") + if pool_size: + del kwargs["connection_pool_size"] + kwargs["pool_size"] = pool_size + + self._engine = self.create_database_engine(**kwargs) def __enter__(self): @@ -73,7 +78,7 @@ def engine(self): return self._engine - def create_database_engine(self, connection_pool_size): + def create_database_engine(self, **kwargs): """Create sqlalchemy database engine Note: @@ -90,13 +95,7 @@ def create_database_engine(self, connection_pool_size): db_conn_string = get_database_string() - if connection_pool_size == 1: - engine = create_engine(db_conn_string, poolclass=NullPool) - - elif connection_pool_size < 0: - raise Exception(f"Invalid Pool Size: {connection_pool_size}") - else: - engine = create_engine(db_conn_string, pool_size=connection_pool_size) + engine = create_engine(db_conn_string, **kwargs) @event.listens_for(engine, "connect", insert=True) def set_search_path(dbapi_connection, connection_record): From f50be8e5abd054e7de57c8346be934d280c25f9c Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 30 Jan 2023 14:36:29 -0600 Subject: [PATCH 076/134] check for manually added repos Signed-off-by: Isaac Milarsky --- .../facade_worker/facade05repofetch.py | 2 +- augur/tasks/init/celery_app.py | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index ccd5bd8c20..b6667b2473 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -297,7 +297,7 @@ def git_repo_updates(session,repo_git): #query = s.sql.text("""SELECT repo_id,repo_group_id,repo_git,repo_name,repo_path FROM repo WHERE # repo_status='Update'""") - query = query = session.query(Repo).filter( + query = session.query(Repo).filter( Repo.repo_git == repo_git,Repo.repo_status == 'Update') result = execute_session_query(query, 'all') diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index c00d229304..9db174c29a 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -14,6 +14,7 @@ from augur.application.config import 
AugurConfig from augur.application.db.engine import get_database_string from augur.tasks.init import get_redis_conn_values, get_rabbitmq_conn_string +from augur.application.db.models import CollectionStatus logger = logging.getLogger(__name__) @@ -101,6 +102,18 @@ def split_tasks_into_groups(augur_tasks: List[str]) -> Dict[str, List[str]]: return grouped_tasks +def create_collection_status(logger): + + with DatabaseSession(logger) as session: + query = s.sql.text(""" + SELECT repo_id FROM repo where repo_id NOT IN (SELECT repo_id FROM augur_operations.collection_status) + """) + + repos = session.execute_sql(query).fetchall() + + for repo in repos: + CollectionStatus.insert(session,repo[0]) + @celery_app.on_after_finalize.connect def setup_periodic_tasks(sender, **kwargs): @@ -117,6 +130,8 @@ def setup_periodic_tasks(sender, **kwargs): """ from augur.tasks.start_tasks import augur_collection_monitor + create_collection_status(logger) + with DatabaseSession(logger) as session: config = AugurConfig(logger, session) From d4a0ea2548b7bb9709822416d72d3d10b6b6021f Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Mon, 30 Jan 2023 14:45:52 -0600 Subject: [PATCH 077/134] Improve connection handling Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/db/session.py | 6 +++++- augur/application/util.py | 5 +++-- augur/tasks/github/issues/tasks.py | 4 ++++ augur/tasks/github/pull_requests/tasks.py | 4 ++++ augur/tasks/init/__init__.py | 5 +++-- augur/tasks/init/celery_app.py | 7 ++++--- augur/tasks/start_tasks.py | 5 +++-- 7 files changed, 26 insertions(+), 10 deletions(-) diff --git a/augur/application/db/session.py b/augur/application/db/session.py index edaa16a70a..2bf037108d 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -51,7 +51,7 @@ def remove_null_characters_from_list_of_dicts(data_list, fields): class DatabaseSession(Session): - def __init__(self, logger, engine=None): + def __init__(self, logger, engine=None, from_msg=None): self.logger = logger self.engine = engine @@ -63,6 +63,10 @@ def __init__(self, logger, engine=None): self.engine_created = True self.engine = DatabaseEngine().engine + if from_msg: + logger.debug(f"ENGINE CREATE: {from_msg}") + else: + logger.debug(f"ENGINE CREATE") super().__init__(self.engine) diff --git a/augur/application/util.py b/augur/application/util.py index 9478ab5a02..46ce99d9da 100644 --- a/augur/application/util.py +++ b/augur/application/util.py @@ -1,13 +1,14 @@ import logging from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.util.repo_load_controller import RepoLoadController logger = logging.getLogger(__name__) def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: controller = RepoLoadController(session) @@ -17,7 +18,7 @@ def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): def get_all_repos_count(): - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: controller = RepoLoadController(session) diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index 1fe9778976..7dd5dcac8d 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -23,6 +23,8 @@ def 
collect_issues(repo_git : str) -> None: from augur.tasks.init.celery_app import engine + print(f"Eventlet engine id: {id(engine)}") + logger = logging.getLogger(collect_issues.__name__) with GithubTaskSession(logger, engine) as session: @@ -52,6 +54,8 @@ def retrieve_all_issue_data(repo_git, logger) -> None: from augur.tasks.init.celery_app import engine + print(f"Eventlet engine id: {id(engine)}") + owner, repo = get_owner_repo(repo_git) logger.info(f"Collecting issues for {owner}/{repo}") diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 830bbf89f5..520c66986f 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -22,6 +22,8 @@ def collect_pull_requests(repo_git: str) -> None: from augur.tasks.init.celery_app import engine + print(f"Eventlet engine id: {id(engine)}") + logger = logging.getLogger(collect_pull_requests.__name__) logger.info(f"Celery engine: {engine}") @@ -51,6 +53,8 @@ def retrieve_all_pr_data(repo_git: str, logger) -> None: from augur.tasks.init.celery_app import engine + print(f"Eventlet engine id: {id(engine)}") + owner, repo = get_owner_repo(repo_git) # define GithubTaskSession to handle insertions, and store oauth keys diff --git a/augur/tasks/init/__init__.py b/augur/tasks/init/__init__.py index 73a50f3435..194a5ed835 100644 --- a/augur/tasks/init/__init__.py +++ b/augur/tasks/init/__init__.py @@ -1,13 +1,14 @@ import logging from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.application.config import AugurConfig def get_redis_conn_values(): logger = logging.getLogger(__name__) - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: config = AugurConfig(logger, session) @@ -22,7 +23,7 @@ def get_redis_conn_values(): def get_rabbitmq_conn_string(): logger = logging.getLogger(__name__) - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: config = AugurConfig(logger, session) rabbbitmq_conn_string = config.get_value("RabbitMQ", "connection_string") diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 9db174c29a..95be3412da 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -11,6 +11,7 @@ from augur.application.logs import TaskLogConfig from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.application.config import AugurConfig from augur.application.db.engine import get_database_string from augur.tasks.init import get_redis_conn_values, get_rabbitmq_conn_string @@ -129,10 +130,10 @@ def setup_periodic_tasks(sender, **kwargs): The tasks so that they are grouped by the module they are defined in """ from augur.tasks.start_tasks import augur_collection_monitor + from augur.tasks.start_tasks import non_repo_domain_tasks - create_collection_status(logger) - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: config = AugurConfig(logger, session) @@ -184,7 +185,7 @@ def init_eventlet_worker(**kwargs): from augur.application.db.engine import DatabaseEngine - engine = DatabaseEngine().engine + engine = DatabaseEngine(pool_size=40, max_overflow=50, pool_timeout=60).engine logger.info(f"Creating database engine for worker. 
Engine: {id(engine)}") @eventlet_pool_postshutdown.connect diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index e25032e1ef..773ec2c0d0 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -30,6 +30,7 @@ from augur.application.logs import AugurLogger from augur.application.config import AugurConfig from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query from logging import Logger from enum import Enum @@ -119,7 +120,7 @@ def prelim_phase(repo_git): logger = logging.getLogger(prelim_phase.__name__) job = None - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_obj = execute_session_query(query, 'one') @@ -141,7 +142,7 @@ def repo_collect_phase(repo_git): np_clustered_array = [] #A chain is needed for each repo. - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: query = session.query(Repo).filter(Repo.repo_git == repo_git) repo_obj = execute_session_query(query, 'one') repo_git = repo_obj.repo_git From 2b4c50e985ccf02970f74ed7061126c44bf0b9b9 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 17:53:40 -0600 Subject: [PATCH 078/134] Update schema revison with updated collection_interval Signed-off-by: Andrew Brain --- .../alembic/versions/5_add_collection_status_table.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/augur/application/schema/alembic/versions/5_add_collection_status_table.py b/augur/application/schema/alembic/versions/5_add_collection_status_table.py index a2ce4d2cb3..13f7f5f61e 100644 --- a/augur/application/schema/alembic/versions/5_add_collection_status_table.py +++ b/augur/application/schema/alembic/versions/5_add_collection_status_table.py @@ -39,6 +39,13 @@ def upgrade(): repo_id = repo[0] conn.execute(text(f"""INSERT INTO "augur_operations"."collection_status" ("repo_id") VALUES ({repo_id});""")) + conn.execute(text(""" + UPDATE augur_operations.config + SET value = '600' + WHERE section_name = 'Tasks' + AND setting_name = 'collection_interval'; + """)) + # ### end Alembic commands ### From 71400b1eed03ef730b5c64c130158c2ddc7b1708 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 30 Jan 2023 20:11:42 -0600 Subject: [PATCH 079/134] Wait on process Signed-off-by: Andrew Brain --- augur/application/cli/backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 64a19dc0da..7b75a166da 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -115,7 +115,7 @@ def start(disable_collection, development, port): logger.info("Collection disabled") try: - server.wait() + worker_1_process.wait() except KeyboardInterrupt: if server: From 9b294543d1c43f3de45e30ad9be94071ecd5fb75 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 30 Jan 2023 20:54:38 -0600 Subject: [PATCH 080/134] remove affils Signed-off-by: Isaac Milarsky --- augur/application/cli/backend.py | 17 +++++++++++++++++ augur/tasks/init/celery_app.py | 17 ----------------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 7b75a166da..d90a1979fb 100644 --- a/augur/application/cli/backend.py +++ 
b/augur/application/cli/backend.py @@ -26,10 +26,25 @@ from augur.application.logs import AugurLogger from augur.application.config import AugurConfig from augur.application.cli import test_connection, test_db_connection +import sqlalchemy as s logger = AugurLogger("augur", reset_logfiles=True).get_logger() + +def create_collection_status(logger): + + with DatabaseSession(logger) as session: + query = s.sql.text(""" + SELECT repo_id FROM repo WHERE repo_id NOT IN (SELECT repo_id FROM augur_operations.collection_status) + """) + + repos = session.execute_sql(query).fetchall() + + for repo in repos: + CollectionStatus.insert(session,repo[0]) + + @click.group('server', short_help='Commands for controlling the backend API server & data collection workers') def cli(): pass @@ -94,6 +109,8 @@ def start(disable_collection, development, port): cpu_worker_process = subprocess.Popen(cpu_worker.split(" ")) time.sleep(5) + create_collection_status(logger) + with DatabaseSession(logger) as session: collection_status_list = session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value) diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 95be3412da..0400b1a77c 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -103,17 +103,7 @@ def split_tasks_into_groups(augur_tasks: List[str]) -> Dict[str, List[str]]: return grouped_tasks -def create_collection_status(logger): - with DatabaseSession(logger) as session: - query = s.sql.text(""" - SELECT repo_id FROM repo where repo_id NOT IN (SELECT repo_id FROM augur_operations.collection_status) - """) - - repos = session.execute_sql(query).fetchall() - - for repo in repos: - CollectionStatus.insert(session,repo[0]) @celery_app.on_after_finalize.connect @@ -131,7 +121,6 @@ def setup_periodic_tasks(sender, **kwargs): """ from augur.tasks.start_tasks import augur_collection_monitor from augur.tasks.start_tasks import non_repo_domain_tasks - with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: @@ -141,12 +130,6 @@ def setup_periodic_tasks(sender, **kwargs): logger.info(f"Scheduling collection every {collection_interval/60} minutes") sender.add_periodic_task(collection_interval, augur_collection_monitor.s()) - #Do longer tasks less often - non_domain_collection_interval = collection_interval * 5 - logger.info(f"Scheduling non-repo-domain collection every {non_domain_collection_interval/60} minutes") - sender.add_periodic_task(non_domain_collection_interval, non_repo_domain_tasks().s) - - @after_setup_logger.connect def setup_loggers(*args,**kwargs): """Override Celery loggers with our own.""" From 6f98d7dbfb83acb87dc46f7f9c73f872741641ff Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 30 Jan 2023 21:22:08 -0600 Subject: [PATCH 081/134] reduce max repos run at a time Signed-off-by: Isaac Milarsky --- augur/tasks/start_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 773ec2c0d0..f01bbc4dff 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -296,7 +296,7 @@ def augur_collection_monitor(): #Get phase options from the config with DatabaseSession(logger, engine) as session: - max_repo_count = 500 + max_repo_count = 50 days = 30 config = AugurConfig(logger, session) From 086a342069b26524c872cfaa5ee7ee0f8023a3b1 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Wed, 1 Feb 2023 14:31:17 -0600 
Subject: [PATCH 082/134] 8knot fixes (#2164) * Fixes to various frontend issues Signed-off-by: Andrew Brain * Fix deleting repo Signed-off-by: Andrew Brain * Temp fixes Signed-off-by: Andrew Brain * Fix session passing Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Possible fix Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Fix dumb error Signed-off-by: Andrew Brain * Fix session out of scope issue Signed-off-by: Andrew Brain * Fixes to user creation Signed-off-by: Andrew Brain * Fix small error Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --------- Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/api/view/augur_view.py | 5 +++-- augur/api/view/routes.py | 4 ++-- augur/application/db/models/augur_operations.py | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index 3afa315a11..7124067ce5 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -63,6 +63,9 @@ def load_user(user_id): db_session = DatabaseSession(logger) user = User.get_user(db_session, user_id) + if not user: + return None + groups = user.groups tokens = user.tokens applications = user.applications @@ -74,8 +77,6 @@ def load_user(user_id): application = token.application db_session.expunge(user) - if not user: - return None # The flask_login library sets a unique session["_id"] # when login_user() is called successfully diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 78a461a1d6..fd6b32b755 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -171,11 +171,11 @@ def user_login(): last_name = request.form.get('last_name') admin = request.form.get('admin') or False - result = User.create_user(db_session, username, password, email, first_name, last_name, admin) + result = User.create_user(username, password, email, first_name, last_name, admin) if not result[0]: raise LoginException("An error occurred registering your account") else: - user = User.get_user(username) + user = User.get_user(db_session, username) flash(result[1]["status"]) # Log the user in if the password is valid diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index 3f3b8566f7..9cdbd621f7 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -322,7 +322,7 @@ def create_user(username: str, password: str, email: str, first_name:str, last_n session.add(user) session.commit() - result = user.add_group(session, "default") + result = user.add_group("default") if not result[0] and result[1]["status"] != "Group already exists": return False, {"status": "Failed to add default group for the user"} From ac8e2828bd0b4b712405c483a4549fe2ade73653 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 2 Feb 2023 09:51:10 -0600 Subject: [PATCH 083/134] Change workers and queues Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/cli/backend.py | 4 ++-- augur/tasks/init/celery_app.py | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 7b75a166da..4e42e9d9f0 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -87,8 +87,8 @@ def 
start(disable_collection, development, port): logger.info("Deleting old task schedule") os.remove("celerybeat-schedule.db") - worker_1 = f"celery -A augur.tasks.init.celery_app.celery_app worker -P eventlet -l info --concurrency=100 -n {uuid.uuid4().hex}@%h" - cpu_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h -Q cpu" + worker_1 = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=1 -n scheduling:{uuid.uuid4().hex}@%h -Q scheduling" + cpu_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h" worker_1_process = subprocess.Popen(worker_1.split(" ")) cpu_worker_process = subprocess.Popen(cpu_worker.split(" ")) diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 95be3412da..17d59d3e9e 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -59,7 +59,7 @@ # define the queues that tasks will be put in (by default tasks are put in celery queue) celery_app.conf.task_routes = { - 'augur.tasks.git.facade_tasks.*': {'queue': 'cpu'} + 'augur.tasks.start_tasks.*': {'queue': 'scheduling'} } #Setting to be able to see more detailed states of running tasks @@ -137,6 +137,8 @@ def setup_periodic_tasks(sender, **kwargs): config = AugurConfig(logger, session) + print(augur_collection_monitor) + collection_interval = config.get_value('Tasks', 'collection_interval') logger.info(f"Scheduling collection every {collection_interval/60} minutes") sender.add_periodic_task(collection_interval, augur_collection_monitor.s()) @@ -144,7 +146,7 @@ def setup_periodic_tasks(sender, **kwargs): #Do longer tasks less often non_domain_collection_interval = collection_interval * 5 logger.info(f"Scheduling non-repo-domain collection every {non_domain_collection_interval/60} minutes") - sender.add_periodic_task(non_domain_collection_interval, non_repo_domain_tasks().s) + sender.add_periodic_task(non_domain_collection_interval, non_repo_domain_tasks.s()) @after_setup_logger.connect @@ -186,7 +188,7 @@ def init_eventlet_worker(**kwargs): from augur.application.db.engine import DatabaseEngine engine = DatabaseEngine(pool_size=40, max_overflow=50, pool_timeout=60).engine - logger.info(f"Creating database engine for worker. Engine: {id(engine)}") + logger.info(f"Creating database engine for eventlet worker. 
Engine: {id(engine)}") @eventlet_pool_postshutdown.connect def shutdown_eventlet_worker(**kwargs): From d2444cc5a54406f4505a27a19327b3e8b7bbe97b Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 2 Feb 2023 09:54:39 -0600 Subject: [PATCH 084/134] exponential backoff when catching operational errors Signed-off-by: Isaac Milarsky --- augur/application/db/util.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/augur/application/db/util.py b/augur/application/db/util.py index 153b3e2deb..9fa49ab00d 100644 --- a/augur/application/db/util.py +++ b/augur/application/db/util.py @@ -7,12 +7,16 @@ def catch_operational_error(func): attempts = 0 error = None + timeout = 240 + while attempts < 4: # do the sleep here instead of instead of in the exception # so it doesn't sleep after the last failed time if attempts > 0: - time.sleep(240) + #Do a 30% exponential backoff + time.sleep(timeout) + timeout = int(timeout * 1.3) try: return func() except OperationalError as e: From 3d1dcc801570531cd807d62d116e88c3f816a0b2 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 2 Feb 2023 10:05:05 -0600 Subject: [PATCH 085/134] Fix dev engine connections (#2162) * Add handlers for the eventlet worker engine creation and deletion Signed-off-by: Andrew Brain * Fix merge conflicts Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Improve connections Signed-off-by: Andrew Brain * Make DatabaseEngine class take all params create_engine does Signed-off-by: Andrew Brain * Fix merge conflicts Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * adding traceback to messages task Signed-off-by: Sean Goggins * Turn off gossip and mingle for workers Signed-off-by: Andrew Brain * Turn off celery broker heartbeat Signed-off-by: Andrew Brain * Fix Signed-off-by: Andrew Brain * Revert Signed-off-by: Andrew Brain * Remove eventlet worker Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Up pool size and timeout Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --------- Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Signed-off-by: Sean Goggins Co-authored-by: Sean Goggins --- augur/application/cli/backend.py | 14 +- augur/application/db/engine.py | 19 ++- augur/application/db/session.py | 6 +- augur/application/util.py | 5 +- augur/tasks/data_analysis/__init__.py | 3 +- .../data_analysis/clustering_worker/tasks.py | 4 +- .../contributor_breadth_worker.py | 8 +- .../data_analysis/discourse_analysis/tasks.py | 7 +- .../data_analysis/insight_worker/tasks.py | 84 +++++------ .../data_analysis/message_insights/tasks.py | 19 ++- .../pull_request_analysis_worker/tasks.py | 26 ++-- augur/tasks/db/refresh_materialized_views.py | 4 +- augur/tasks/git/dependency_tasks/core.py | 2 +- augur/tasks/git/dependency_tasks/tasks.py | 4 +- augur/tasks/git/facade_tasks.py | 55 +++++++- .../facade_worker/facade00mainprogram.py | 1 - .../facade_worker/facade01config.py | 4 +- augur/tasks/github/contributors/tasks.py | 4 +- augur/tasks/github/detect_move/tasks.py | 10 +- augur/tasks/github/events/tasks.py | 8 +- augur/tasks/github/facade_github/tasks.py | 5 +- augur/tasks/github/issues/tasks.py | 14 +- augur/tasks/github/messages/tasks.py | 11 +- .../pull_requests/commits_model/core.py | 3 +- .../pull_requests/commits_model/tasks.py | 9 +- augur/tasks/github/pull_requests/core.py | 9 +- 
.../github/pull_requests/files_model/core.py | 131 +++++++++--------- .../github/pull_requests/files_model/tasks.py | 7 +- augur/tasks/github/pull_requests/tasks.py | 16 ++- augur/tasks/github/releases/tasks.py | 4 +- augur/tasks/github/repo_info/tasks.py | 4 +- .../github/util/github_random_key_auth.py | 1 - augur/tasks/init/__init__.py | 5 +- augur/tasks/init/celery_app.py | 16 ++- augur/tasks/init/celery_worker.py | 4 + augur/tasks/start_tasks.py | 13 +- 36 files changed, 338 insertions(+), 201 deletions(-) create mode 100644 augur/tasks/init/celery_worker.py diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index a80701e0c1..a3c23f4959 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -86,11 +86,9 @@ def start(disable_collection, development, port): logger.info("Deleting old task schedule") os.remove("celerybeat-schedule.db") - worker_1 = f"celery -A augur.tasks.init.celery_app.celery_app worker -P eventlet -l info --concurrency=100 -n {uuid.uuid4().hex}@%h" - cpu_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h -Q cpu" - worker_1_process = subprocess.Popen(worker_1.split(" ")) + worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h" - cpu_worker_process = subprocess.Popen(cpu_worker.split(" ")) + worker_process = subprocess.Popen(worker.split(" ")) time.sleep(5) start_task.si().apply_async() @@ -109,13 +107,9 @@ def start(disable_collection, development, port): logger.info("Shutting down server") server.terminate() - if worker_1_process: + if worker_process: logger.info("Shutting down celery process") - worker_1_process.terminate() - - if cpu_worker_process: - logger.info("Shutting down celery process") - cpu_worker_process.terminate() + worker_process.terminate() if celery_beat_process: logger.info("Shutting down celery beat process") diff --git a/augur/application/db/engine.py b/augur/application/db/engine.py index 9d2a2621fc..5cb01ee0c4 100644 --- a/augur/application/db/engine.py +++ b/augur/application/db/engine.py @@ -52,9 +52,14 @@ def get_database_string() -> str: class DatabaseEngine(): - def __init__(self, connection_pool_size=5): + def __init__(self, **kwargs): - self._engine = self.create_database_engine(connection_pool_size) + pool_size = kwargs.get("connection_pool_size") + if pool_size: + del kwargs["connection_pool_size"] + kwargs["pool_size"] = pool_size + + self._engine = self.create_database_engine(**kwargs) def __enter__(self): @@ -73,7 +78,7 @@ def engine(self): return self._engine - def create_database_engine(self, connection_pool_size): + def create_database_engine(self, **kwargs): """Create sqlalchemy database engine Note: @@ -90,13 +95,7 @@ def create_database_engine(self, connection_pool_size): db_conn_string = get_database_string() - if connection_pool_size == 1: - engine = create_engine(db_conn_string, poolclass=NullPool) - - elif connection_pool_size < 0: - raise Exception(f"Invalid Pool Size: {connection_pool_size}") - else: - engine = create_engine(db_conn_string, pool_size=connection_pool_size) + engine = create_engine(db_conn_string, **kwargs) @event.listens_for(engine, "connect", insert=True) def set_search_path(dbapi_connection, connection_record): diff --git a/augur/application/db/session.py b/augur/application/db/session.py index edaa16a70a..2bf037108d 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -51,7 +51,7 @@ 
def remove_null_characters_from_list_of_dicts(data_list, fields): class DatabaseSession(Session): - def __init__(self, logger, engine=None): + def __init__(self, logger, engine=None, from_msg=None): self.logger = logger self.engine = engine @@ -63,6 +63,10 @@ def __init__(self, logger, engine=None): self.engine_created = True self.engine = DatabaseEngine().engine + if from_msg: + logger.debug(f"ENGINE CREATE: {from_msg}") + else: + logger.debug(f"ENGINE CREATE") super().__init__(self.engine) diff --git a/augur/application/util.py b/augur/application/util.py index 9478ab5a02..46ce99d9da 100644 --- a/augur/application/util.py +++ b/augur/application/util.py @@ -1,13 +1,14 @@ import logging from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.util.repo_load_controller import RepoLoadController logger = logging.getLogger(__name__) def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: controller = RepoLoadController(session) @@ -17,7 +18,7 @@ def get_all_repos(page=0, page_size=25, sort="repo_id", direction="ASC"): def get_all_repos_count(): - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: controller = RepoLoadController(session) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 3324137668..5629644f98 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -14,10 +14,11 @@ def machine_learning_phase(): from augur.tasks.data_analysis.message_insights.tasks import message_insight_model from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model + from augur.tasks.init.celery_app import engine logger = logging.getLogger(machine_learning_phase.__name__) - with DatabaseSession(logger) as session: + with DatabaseSession(logger, engine) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index 649fc81cfb..91f01912ef 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -19,7 +19,7 @@ from textblob import TextBlob from collections import Counter -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, RepoClusterMessage, RepoTopic, TopicWord @@ -34,6 +34,8 @@ @celery.task def clustering_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(clustering_model.__name__) logger.info(f"Starting clustering analysis for {repo_git}") diff --git a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py index 183290edd4..1695dc935b 100644 --- a/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py +++ b/augur/tasks/data_analysis/contributor_breadth_worker/contributor_breadth_worker.py @@ -3,7 +3,7 @@ import pandas as pd import sqlalchemy as s -from 
augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.tasks.github.util.github_paginator import GithubPaginator from augur.application.db.models import ContributorRepo @@ -25,6 +25,8 @@ @celery.task def contributor_breadth_model() -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(contributor_breadth_model.__name__) tool_source = 'Contributor Breadth Worker' @@ -43,8 +45,8 @@ def contributor_breadth_model() -> None: WHERE gh_login IS NOT NULL """) - with DatabaseEngine(connection_pool_size=1) as engine: - current_cntrb_logins = json.loads(pd.read_sql(cntrb_login_query, engine, params={}).to_json(orient="records")) + + current_cntrb_logins = json.loads(pd.read_sql(cntrb_login_query, engine, params={}).to_json(orient="records")) ## We need a list of all contributors so we can iterate through them to gather events ## We need a list of event ids to avoid insertion of duplicate events. We ignore the event diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index b11b711e2d..c14d9146da 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -6,7 +6,7 @@ import nltk from collections import Counter -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.db.models import Repo, DiscourseInsight from augur.application.db.engine import DatabaseEngine @@ -34,6 +34,8 @@ @celery.task def discourse_analysis_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(discourse_analysis_model.__name__) tool_source = 'Discourse Worker' @@ -64,8 +66,7 @@ def discourse_analysis_model(repo_git: str) -> None: """) # result = db.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - with DatabaseEngine(connection_pool_size=1) as engine: - msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, engine, params={"repo_id": repo_id}) + msg_df_cur_repo = pd.read_sql(get_messages_for_repo_sql, engine, params={"repo_id": repo_id}) msg_df_cur_repo = msg_df_cur_repo.sort_values(by=['thread_id']).reset_index(drop=True) logger.info(msg_df_cur_repo.head()) diff --git a/augur/tasks/data_analysis/insight_worker/tasks.py b/augur/tasks/data_analysis/insight_worker/tasks.py index 16f8cfaeba..aefc849744 100644 --- a/augur/tasks/data_analysis/insight_worker/tasks.py +++ b/augur/tasks/data_analysis/insight_worker/tasks.py @@ -13,7 +13,7 @@ from sklearn.ensemble import IsolationForest import warnings -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, ChaossMetricStatus, RepoInsight, RepoInsightsRecord @@ -26,6 +26,8 @@ @celery.task def insight_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(insight_model.__name__) refresh = True @@ -98,46 +100,45 @@ def insight_model(repo_git: str) -> None: return """ Deletion of old insights """ - with DatabaseEngine(connection_pool_size=1) as engine: - # Delete previous insights not in 
the anomaly_days param - min_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) - logger.info("MIN DATE: {}\n".format(min_date)) - logger.info("Deleting out of date records ...\n") - delete_record_SQL = s.sql.text(""" - DELETE - FROM - repo_insights_records - WHERE - repo_id = :repo_id + # Delete previous insights not in the anomaly_days param + min_date = datetime.datetime.now() - datetime.timedelta(days=anomaly_days) + logger.info("MIN DATE: {}\n".format(min_date)) + logger.info("Deleting out of date records ...\n") + delete_record_SQL = s.sql.text(""" + DELETE + FROM + repo_insights_records + WHERE + repo_id = :repo_id + AND ri_date < :min_date + """) + result = engine.execute(delete_record_SQL, repo_id=repo_id, min_date=min_date) + + logger.info("Deleting out of date data points ...\n") + delete_points_SQL = s.sql.text(""" + DELETE + FROM + repo_insights + USING ( + SELECT ri_metric, ri_field + FROM ( + SELECT * + FROM repo_insights + WHERE ri_fresh = TRUE + AND repo_id = :repo_id AND ri_date < :min_date - """) - result = engine.execute(delete_record_SQL, repo_id=repo_id, min_date=min_date) + ) old_insights + ) to_delete + WHERE repo_insights.ri_metric = to_delete.ri_metric + AND repo_insights.ri_field = to_delete.ri_field + """) + result = engine.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) + + # get table values to check for dupes later on - logger.info("Deleting out of date data points ...\n") - delete_points_SQL = s.sql.text(""" - DELETE - FROM - repo_insights - USING ( - SELECT ri_metric, ri_field - FROM ( - SELECT * - FROM repo_insights - WHERE ri_fresh = TRUE - AND repo_id = :repo_id - AND ri_date < :min_date - ) old_insights - ) to_delete - WHERE repo_insights.ri_metric = to_delete.ri_metric - AND repo_insights.ri_field = to_delete.ri_field - """) - result = engine.execute(delete_points_SQL, repo_id=repo_id, min_date=min_date) - - # get table values to check for dupes later on - - - table_values_sql = s.sql.text("""SELECT * FROM repo_insights_records WHERE repo_id={}""".format(repo_id)) - insight_table_values = pd.read_sql(table_values_sql, engine, params={}) + + table_values_sql = s.sql.text("""SELECT * FROM repo_insights_records WHERE repo_id={}""".format(repo_id)) + insight_table_values = pd.read_sql(table_values_sql, engine, params={}) to_model_columns = df.columns[0:len(metrics) + 1] @@ -308,6 +309,8 @@ def confidence_interval_insights(logger): """ Anomaly detection method based on confidence intervals """ + from augur.tasks.init.celery_app import engine + # Update table of endpoints before we query them all logger.info("Discovering insights for task with entry info: {}".format(entry_info)) @@ -699,6 +702,9 @@ def confidence_interval(data, logger, timeperiod='week', confidence=.95, ): return m, m - h, m + h def update_metrics(api_host, api_port, tool_source, tool_version, logger): + + from augur.tasks.init.celery_app import engine + logger.info("Preparing to update metrics ...\n\n" + "Hitting endpoint: http://{}:{}/api/unstable/metrics/status ...\n".format( api_host, api_port)) diff --git a/augur/tasks/data_analysis/message_insights/tasks.py b/augur/tasks/data_analysis/message_insights/tasks.py index 6a34944473..5377be36db 100644 --- a/augur/tasks/data_analysis/message_insights/tasks.py +++ b/augur/tasks/data_analysis/message_insights/tasks.py @@ -11,7 +11,7 @@ from augur.tasks.data_analysis.message_insights.message_novelty import novelty_analysis from augur.tasks.data_analysis.message_insights.message_sentiment import get_senti_score 
-from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, MessageAnalysis, MessageAnalysisSummary @@ -25,6 +25,8 @@ @celery.task def message_insight_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(message_insight_model.__name__) full_train = True @@ -53,8 +55,7 @@ def message_insight_model(repo_git: str) -> None: repo_exists_SQL = s.sql.text(""" SELECT exists (SELECT 1 FROM augur_data.message_analysis_summary WHERE repo_id = :repo_id LIMIT 1)""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_rep = pd.read_sql_query(repo_exists_SQL, engine, params={'repo_id': repo_id}) + df_rep = pd.read_sql_query(repo_exists_SQL, engine, params={'repo_id': repo_id}) #full_train = not(df_rep['exists'].iloc[0]) logger.info(f'Full Train: {full_train}') @@ -79,8 +80,7 @@ def message_insight_model(repo_git: str) -> None: where message.repo_id = :repo_id """) - with DatabaseEngine(connection_pool_size=1) as engine: - df_past = pd.read_sql_query(past_SQL, engine, params={'repo_id': repo_id}) + df_past = pd.read_sql_query(past_SQL, engine, params={'repo_id': repo_id}) df_past['msg_timestamp'] = pd.to_datetime(df_past['msg_timestamp']) df_past = df_past.sort_values(by='msg_timestamp') @@ -120,8 +120,7 @@ def message_insight_model(repo_git: str) -> None: left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where message.repo_id = :repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_message = pd.read_sql_query(join_SQL, engine, params={'repo_id': repo_id, 'begin_date': begin_date}) + df_message = pd.read_sql_query(join_SQL, engine, params={'repo_id': repo_id, 'begin_date': begin_date}) logger.info(f'Messages dataframe dim: {df_message.shape}') logger.info(f'Value 1: {df_message.shape[0]}') @@ -156,8 +155,7 @@ def message_insight_model(repo_git: str) -> None: left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where issue_message_ref.repo_id = :repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_past = pd.read_sql_query(merge_SQL, engine, params={'repo_id': repo_id}) + df_past = pd.read_sql_query(merge_SQL, engine, params={'repo_id': repo_id}) df_past = df_past.loc[df_past['novelty_flag'] == 0] rec_errors = df_past['reconstruction_error'].tolist() threshold = threshold_otsu(np.array(rec_errors)) @@ -345,8 +343,7 @@ def message_insight_model(repo_git: str) -> None: FROM message_analysis_summary WHERE repo_id=:repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_past = pd.read_sql_query(message_analysis_query, engine, params={'repo_id': repo_id}) + df_past = pd.read_sql_query(message_analysis_query, engine, params={'repo_id': repo_id}) # df_past = get_table_values(cols=['period', 'positive_ratio', 'negative_ratio', 'novel_count'], # tables=['message_analysis_summary'], diff --git a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py index d6cd816782..76b0514964 100644 --- a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py +++ b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py @@ -8,7 +8,7 @@ from augur.tasks.data_analysis.message_insights.message_sentiment import get_senti_score -from 
augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession from augur.application.config import AugurConfig from augur.application.db.models import Repo, PullRequestAnalysis @@ -24,6 +24,8 @@ @celery.task def pull_request_analysis_model(repo_git: str) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(pull_request_analysis_model.__name__) tool_source = 'Pull Request Analysis Worker' @@ -65,8 +67,7 @@ def pull_request_analysis_model(repo_git: str) -> None: and pr_src_state like 'open' """) - with DatabaseEngine(connection_pool_size=1) as engine: - df_pr = pd.read_sql_query(pr_SQL, engine, params={'begin_date': begin_date, 'repo_id': repo_id}) + df_pr = pd.read_sql_query(pr_SQL, engine, params={'begin_date': begin_date, 'repo_id': repo_id}) logger.info(f'PR Dataframe dim: {df_pr.shape}\n') @@ -98,16 +99,14 @@ def pull_request_analysis_model(repo_git: str) -> None: left outer join augur_data.issue_message_ref on message.msg_id = issue_message_ref.msg_id left outer join augur_data.issues on issue_message_ref.issue_id = issues.issue_id where issue_message_ref.repo_id = :repo_id""") - with DatabaseEngine(connection_pool_size=1) as engine: - df_message = pd.read_sql_query(messages_SQL, engine, params={'repo_id': repo_id}) + df_message = pd.read_sql_query(messages_SQL, engine, params={'repo_id': repo_id}) logger.info(f'Mapping messages to PR, find comment & participants counts') # Map PR to its corresponding messages - with DatabaseEngine(connection_pool_size=1) as engine: - pr_ref_sql = s.sql.text("select * from augur_data.pull_request_message_ref") - df_pr_ref = pd.read_sql_query(pr_ref_sql, engine) + pr_ref_sql = s.sql.text("select * from augur_data.pull_request_message_ref") + df_pr_ref = pd.read_sql_query(pr_ref_sql, engine) df_merge = pd.merge(df_pr, df_pr_ref, on='pull_request_id', how='left') df_merge = pd.merge(df_merge, df_message, on='msg_id', how='left') df_merge = df_merge.dropna(subset=['msg_id'], axis=0) @@ -156,12 +155,11 @@ def pull_request_analysis_model(repo_git: str) -> None: logger.info(f'Fetching repo statistics') # Get repo info - with DatabaseEngine(connection_pool_size=1) as engine: - repo_sql = s.sql.text(""" - SELECT repo_id, pull_requests_merged, pull_request_count,watchers_count, last_updated FROM - augur_data.repo_info where repo_id = :repo_id - """) - df_repo = pd.read_sql_query(repo_sql, engine, params={'repo_id': repo_id}) + repo_sql = s.sql.text(""" + SELECT repo_id, pull_requests_merged, pull_request_count,watchers_count, last_updated FROM + augur_data.repo_info where repo_id = :repo_id + """) + df_repo = pd.read_sql_query(repo_sql, engine, params={'repo_id': repo_id}) df_repo = df_repo.loc[df_repo.groupby('repo_id').last_updated.idxmax(), :] df_repo = df_repo.drop(['last_updated'], axis=1) diff --git a/augur/tasks/db/refresh_materialized_views.py b/augur/tasks/db/refresh_materialized_views.py index 4c15434da2..53b29ddbd2 100644 --- a/augur/tasks/db/refresh_materialized_views.py +++ b/augur/tasks/db/refresh_materialized_views.py @@ -4,13 +4,15 @@ from celery import signature from celery import group, chain, chord, signature -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.session import DatabaseSession @celery.task def refresh_materialized_views(): + from augur.tasks.init.celery_app import engine + 
logger = logging.getLogger(refresh_materialized_views.__name__) refresh_view_query = s.sql.text(""" diff --git a/augur/tasks/git/dependency_tasks/core.py b/augur/tasks/git/dependency_tasks/core.py index 0be1530663..8bd1be13b1 100644 --- a/augur/tasks/git/dependency_tasks/core.py +++ b/augur/tasks/git/dependency_tasks/core.py @@ -6,7 +6,6 @@ from augur.application.db.data_parse import * from augur.application.db.models import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.application.config import AugurConfig from augur.application.db.util import execute_session_query from augur.tasks.git.dependency_tasks.dependency_util import dependency_calculator as dep_calc @@ -16,6 +15,7 @@ def generate_deps_data(session, repo_id, path): :param repo_id: Repository ID :param path: Absolute path of the Repostiory """ + session.logger.info('Searching for deps in repo') session.logger.info(f'Repo ID: {repo_id}, Path: {path}') diff --git a/augur/tasks/git/dependency_tasks/tasks.py b/augur/tasks/git/dependency_tasks/tasks.py index 9dcb8b3463..746754c3bf 100644 --- a/augur/tasks/git/dependency_tasks/tasks.py +++ b/augur/tasks/git/dependency_tasks/tasks.py @@ -10,9 +10,11 @@ def process_dependency_metrics(repo_git_identifiers): #raise NotImplementedError + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_dependency_metrics.__name__) - session = DatabaseSession(logger) + session = DatabaseSession(logger, engine) for repo_git in repo_git_identifiers: query = session.query(Repo).filter(Repo.repo_git == repo_git) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index d9daf4571b..8ba559b4d0 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -29,7 +29,7 @@ from augur.tasks.util.worker_util import create_grouped_task_load -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db import data_parse @@ -46,6 +46,9 @@ #if it does. @celery.task def facade_error_handler(request,exc,traceback): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_error_handler.__name__) logger.error(f"Task {request.id} raised exception: {exc}! 
\n {traceback}") @@ -64,6 +67,9 @@ def facade_error_handler(request,exc,traceback): #Predefine facade collection with tasks @celery.task def facade_analysis_init_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_analysis_init_facade_task.__name__) with FacadeSession(logger) as session: session.update_status('Running analysis') @@ -71,6 +77,9 @@ def facade_analysis_init_facade_task(): @celery.task def grab_comitters(repo_id_list,platform="github"): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(grab_comitters.__name__) for repo_id in repo_id_list: @@ -82,6 +91,9 @@ def grab_comitters(repo_id_list,platform="github"): @celery.task def trim_commits_facade_task(repo_id_list): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(trim_commits_facade_task.__name__) session = FacadeSession(logger) @@ -130,6 +142,9 @@ def update_analysis_log(repos_id,status): @celery.task def trim_commits_post_analysis_facade_task(repo_ids): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) @@ -216,6 +231,9 @@ def update_analysis_log(repos_id,status): @celery.task def facade_analysis_end_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_analysis_end_facade_task.__name__) FacadeSession(logger).log_activity('Info','Running analysis (complete)') @@ -223,6 +241,9 @@ def facade_analysis_end_facade_task(): @celery.task def facade_start_contrib_analysis_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(facade_start_contrib_analysis_task.__name__) session = FacadeSession(logger) session.update_status('Updating Contributors') @@ -235,6 +256,8 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: """Take a large list of commit data to analyze and store in the database. Meant to be run in parallel with other instances of this task. """ + from augur.tasks.init.celery_app import engine + #create new session for celery thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) # TODO: Is this session ever closed? @@ -323,6 +346,9 @@ def analyze_commits_in_parallel(repo_ids, multithreaded: bool)-> None: @celery.task def nuke_affiliations_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(nuke_affiliations_facade_task.__name__) # TODO: Is this session ever closed? 
session = FacadeSession(logger) @@ -331,12 +357,18 @@ def nuke_affiliations_facade_task(): @celery.task def fill_empty_affiliations_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(fill_empty_affiliations_facade_task.__name__) with FacadeSession(logger) as session: fill_empty_affiliations(session) @celery.task def invalidate_caches_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(invalidate_caches_facade_task.__name__) with FacadeSession(logger) as session: @@ -344,6 +376,9 @@ def invalidate_caches_facade_task(): @celery.task def rebuild_unknown_affiliation_and_web_caches_facade_task(): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(rebuild_unknown_affiliation_and_web_caches_facade_task.__name__) with FacadeSession(logger) as session: @@ -351,6 +386,9 @@ def rebuild_unknown_affiliation_and_web_caches_facade_task(): @celery.task def force_repo_analysis_facade_task(repo_git_identifiers): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(force_repo_analysis_facade_task.__name__) with FacadeSession(logger) as session: @@ -358,6 +396,9 @@ def force_repo_analysis_facade_task(repo_git_identifiers): @celery.task def git_repo_cleanup_facade_task(repo_git_identifiers): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(git_repo_cleanup_facade_task.__name__) with FacadeSession(logger) as session: @@ -365,6 +406,9 @@ def git_repo_cleanup_facade_task(repo_git_identifiers): @celery.task def git_repo_initialize_facade_task(repo_git_identifiers): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(git_repo_initialize_facade_task.__name__) with FacadeSession(logger) as session: @@ -372,6 +416,9 @@ def git_repo_initialize_facade_task(repo_git_identifiers): @celery.task def check_for_repo_updates_facade_task(repo_git_identifiers): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(check_for_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: @@ -379,6 +426,9 @@ def check_for_repo_updates_facade_task(repo_git_identifiers): @celery.task def force_repo_updates_facade_task(repo_git_identifiers): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(force_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: @@ -386,6 +436,9 @@ def force_repo_updates_facade_task(repo_git_identifiers): @celery.task def git_repo_updates_facade_task(repo_git_identifiers): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(git_repo_updates_facade_task.__name__) with FacadeSession(logger) as session: diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py b/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py index 2ac69e719c..a6d0e6b386 100755 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade00mainprogram.py @@ -42,7 +42,6 @@ from augur.tasks.github.facade_github.contributor_interfaceable.contributor_interface import * from augur.tasks.github.util.github_task_session import GithubTaskSession -from augur.tasks.init.celery_app import engine from logging import Logger from sqlalchemy.sql.elements import TextClause diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py index 
f0f16b3d38..ca58994411 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py @@ -111,9 +111,11 @@ class FacadeSession(GithubTaskSession): create_xlsx_summary_files (int): toggles whether to create excel summary files """ def __init__(self,logger: Logger): + + from augur.tasks.init.celery_app import engine #self.cfg = FacadeConfig(logger) self.repos_processed = 0 - super().__init__(logger=logger) + super().__init__(logger=logger, engine=engine) # Figure out what we need to do worker_options = AugurConfig(logger, self).get_section("Facade") diff --git a/augur/tasks/github/contributors/tasks.py b/augur/tasks/github/contributors/tasks.py index 38967edc4f..0304abca45 100644 --- a/augur/tasks/github/contributors/tasks.py +++ b/augur/tasks/github/contributors/tasks.py @@ -2,7 +2,7 @@ import logging -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -14,6 +14,8 @@ @celery.task def process_contributors(): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_contributors.__name__) tool_source = "Contributors task" diff --git a/augur/tasks/github/detect_move/tasks.py b/augur/tasks/github/detect_move/tasks.py index 69c268a001..2efaa1ce55 100644 --- a/augur/tasks/github/detect_move/tasks.py +++ b/augur/tasks/github/detect_move/tasks.py @@ -1,12 +1,16 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.detect_move.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query +import traceback -@celery.task -def detect_github_repo_move(repo_git_identifiers : [str]) -> None: +@celery.task() +def detect_github_repo_move(repo_git_identifiers : str) -> None: + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(detect_github_repo_move.__name__) logger.info(f"Starting repo_move operation with {repo_git_identifiers}") diff --git a/augur/tasks/github/events/tasks.py b/augur/tasks/github/events/tasks.py index 17e1efd917..cc4fbafa66 100644 --- a/augur/tasks/github/events/tasks.py +++ b/augur/tasks/github/events/tasks.py @@ -2,7 +2,7 @@ import logging import traceback -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -18,6 +18,8 @@ @celery.task def collect_events(repo_git_identifiers: [str]): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_events.__name__) with DatabaseSession(logger, engine) as session: @@ -50,6 +52,8 @@ def collect_events(repo_git_identifiers: [str]): def retrieve_all_event_data(repo_git: str, logger): + from augur.tasks.init.celery_app import engine + owner, repo = get_owner_repo(repo_git) logger.info(f"Collecting Github events for {owner}/{repo}") @@ -81,6 +85,8 @@ def retrieve_all_event_data(repo_git: str, logger): return all_data def process_events(events, 
task_name, repo_id, logger): + + from augur.tasks.init.celery_app import engine tool_source = "Github events task" tool_version = "2.0" diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 74c2aa139c..11fd9484c3 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -2,7 +2,7 @@ import logging -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -226,6 +226,9 @@ def link_commits_to_contributor(session,contributorQueue): # Update the contributors table from the data facade has gathered. @celery.task def insert_facade_contributors(repo_id_list): + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(insert_facade_contributors.__name__) with GithubTaskSession(logger, engine) as session: diff --git a/augur/tasks/github/issues/tasks.py b/augur/tasks/github/issues/tasks.py index 416ddfc22d..4d359b1c9f 100644 --- a/augur/tasks/github/issues/tasks.py +++ b/augur/tasks/github/issues/tasks.py @@ -6,7 +6,7 @@ from sqlalchemy.exc import IntegrityError -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -21,9 +21,13 @@ @celery.task def collect_issues(repo_git_identifiers: [str]) -> None: + from augur.tasks.init.celery_app import engine + + print(f"Eventlet engine id: {id(engine)}") + logger = logging.getLogger(collect_issues.__name__) - with DatabaseSession(logger, engine) as session: + with GithubTaskSession(logger, engine) as session: for repo_git in repo_git_identifiers: try: @@ -48,6 +52,10 @@ def collect_issues(repo_git_identifiers: [str]) -> None: def retrieve_all_issue_data(repo_git, logger) -> None: + from augur.tasks.init.celery_app import engine + + print(f"Eventlet engine id: {id(engine)}") + owner, repo = get_owner_repo(repo_git) logger.info(f"Collecting issues for {owner}/{repo}") @@ -85,6 +93,8 @@ def retrieve_all_issue_data(repo_git, logger) -> None: return all_data def process_issues(issues, task_name, repo_id, logger) -> None: + + from augur.tasks.init.celery_app import engine # get repo_id or have it passed tool_source = "Issue Task" diff --git a/augur/tasks/github/messages/tasks.py b/augur/tasks/github/messages/tasks.py index 537d273984..e3c5d3380d 100644 --- a/augur/tasks/github/messages/tasks.py +++ b/augur/tasks/github/messages/tasks.py @@ -1,8 +1,9 @@ import time import logging +import traceback -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -20,6 +21,8 @@ @celery.task def collect_github_messages(repo_git_identifiers: [str]) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_github_messages.__name__) with DatabaseSession(logger, engine) as session: @@ -45,6 +48,8 @@ def 
collect_github_messages(repo_git_identifiers: [str]) -> None: def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: + from augur.tasks.init.celery_app import engine + owner, repo = get_owner_repo(repo_git) # url to get issue and pull request comments @@ -85,6 +90,8 @@ def retrieve_all_pr_and_issue_messages(repo_git: str, logger) -> None: def process_messages(messages, task_name, repo_id, logger): + from augur.tasks.init.celery_app import engine + tool_source = "Pr comment task" tool_version = "2.0" data_source = "Github API" @@ -233,4 +240,4 @@ def process_github_comment_contributors(message, tool_source, tool_version, data # This is done by searching all the dicts for the given key that has the specified value def find_dict_in_list_of_dicts(data, key, value): - return next((item for item in data if item[key] == value), None) \ No newline at end of file + return next((item for item in data if item[key] == value), None) diff --git a/augur/tasks/github/pull_requests/commits_model/core.py b/augur/tasks/github/pull_requests/commits_model/core.py index cc9b277889..6e7d7bb22c 100644 --- a/augur/tasks/github/pull_requests/commits_model/core.py +++ b/augur/tasks/github/pull_requests/commits_model/core.py @@ -4,7 +4,6 @@ import sqlalchemy as s from augur.application.db.data_parse import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.application.db.models import * @@ -13,6 +12,8 @@ def pull_request_commits_model(repo_id,logger): + + from augur.tasks.init.celery_app import engine # query existing PRs and the respective url we will append the commits url to pr_url_sql = s.sql.text(""" diff --git a/augur/tasks/github/pull_requests/commits_model/tasks.py b/augur/tasks/github/pull_requests/commits_model/tasks.py index 93e3eaba99..d4a31c5c9e 100644 --- a/augur/tasks/github/pull_requests/commits_model/tasks.py +++ b/augur/tasks/github/pull_requests/commits_model/tasks.py @@ -2,12 +2,15 @@ import traceback from augur.application.db.session import DatabaseSession from augur.tasks.github.pull_requests.commits_model.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query -@celery.task -def process_pull_request_commits(repo_git_identifiers: [str]) -> None: +@celery.task() +def process_pull_request_commits(repo_git_identifiers: str) -> None: + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_pull_request_commits.__name__) with DatabaseSession(logger, engine) as session: diff --git a/augur/tasks/github/pull_requests/core.py b/augur/tasks/github/pull_requests/core.py index 9cd8008993..03a6b80438 100644 --- a/augur/tasks/github/pull_requests/core.py +++ b/augur/tasks/github/pull_requests/core.py @@ -5,7 +5,6 @@ from augur.application.db.data_parse import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.util.util import add_key_value_pair_to_dicts from augur.tasks.util.worker_util import remove_duplicate_dicts @@ -224,6 +223,8 @@ def insert_pr_labels(labels: List[dict], logger: logging.Logger) -> None: labels: list of labels to insert logger: 
handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # we are using pr_src_id and pull_request_id to determine if the label is already in the database. @@ -241,6 +242,8 @@ def insert_pr_assignees(assignees: List[dict], logger: logging.Logger) -> None: assignees: list of assignees to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # we are using pr_assignee_src_id and pull_request_id to determine if the label is already in the database. @@ -258,6 +261,8 @@ def insert_pr_reviewers(reviewers: List[dict], logger: logging.Logger) -> None: reviewers: list of reviewers to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # we are using pr_src_id and pull_request_id to determine if the label is already in the database. @@ -275,6 +280,8 @@ def insert_pr_metadata(metadata: List[dict], logger: logging.Logger) -> None: metadata: list of metadata to insert logger: handles logging """ + from augur.tasks.init.celery_app import engine + with DatabaseSession(logger, engine) as session: # inserting pr metadata diff --git a/augur/tasks/github/pull_requests/files_model/core.py b/augur/tasks/github/pull_requests/files_model/core.py index e8c1cdb3f9..c2d810f911 100644 --- a/augur/tasks/github/pull_requests/files_model/core.py +++ b/augur/tasks/github/pull_requests/files_model/core.py @@ -4,7 +4,6 @@ import sqlalchemy as s from augur.application.db.data_parse import * from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.gh_graphql_entities import GraphQlPageCollection, hit_api_graphql @@ -13,84 +12,86 @@ from augur.application.db.util import execute_session_query def pull_request_files_model(repo_id,logger): + + from augur.tasks.init.celery_app import engine - # query existing PRs and the respective url we will append the commits url to - pr_number_sql = s.sql.text(""" - SELECT DISTINCT pr_src_number as pr_src_number, pull_requests.pull_request_id - FROM pull_requests--, pull_request_meta - WHERE repo_id = :repo_id - """).bindparams(repo_id=repo_id) - pr_numbers = [] - #pd.read_sql(pr_number_sql, self.db, params={}) + # query existing PRs and the respective url we will append the commits url to + pr_number_sql = s.sql.text(""" + SELECT DISTINCT pr_src_number as pr_src_number, pull_requests.pull_request_id + FROM pull_requests--, pull_request_meta + WHERE repo_id = :repo_id + """).bindparams(repo_id=repo_id) + pr_numbers = [] + #pd.read_sql(pr_number_sql, self.db, params={}) - # TODO: Is this session ever closed? - session = GithubTaskSession(logger, engine) - result = session.execute_sql(pr_number_sql).fetchall() - pr_numbers = [dict(zip(row.keys(), row)) for row in result] + # TODO: Is this session ever closed? 
+ session = GithubTaskSession(logger, engine) + result = session.execute_sql(pr_number_sql).fetchall() + pr_numbers = [dict(zip(row.keys(), row)) for row in result] - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') - - owner, name = get_owner_repo(repo.repo_git) + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - pr_file_rows = [] - logger.info(f"Getting pull request files for repo: {repo.repo_git}") - for index,pr_info in enumerate(pr_numbers): + owner, name = get_owner_repo(repo.repo_git) - logger.info(f'Querying files for pull request #{index + 1} of {len(pr_numbers)}') - - query = """ + pr_file_rows = [] + logger.info(f"Getting pull request files for repo: {repo.repo_git}") + for index,pr_info in enumerate(pr_numbers): - query($repo: String!, $owner: String!,$pr_number: Int!, $numRecords: Int!, $cursor: String) { - repository(name: $repo, owner: $owner) { - pullRequest(number: $pr_number) { - files ( first: $numRecords, after: $cursor) - { - edges { - node { - additions - deletions - path - } - } - totalCount - pageInfo { - hasNextPage - endCursor + logger.info(f'Querying files for pull request #{index + 1} of {len(pr_numbers)}') + + query = """ + + query($repo: String!, $owner: String!,$pr_number: Int!, $numRecords: Int!, $cursor: String) { + repository(name: $repo, owner: $owner) { + pullRequest(number: $pr_number) { + files ( first: $numRecords, after: $cursor) + { + edges { + node { + additions + deletions + path } } + totalCount + pageInfo { + hasNextPage + endCursor + } } } } - """ - - values = ("repository","pullRequest","files") - params = { - 'owner' : owner, - 'repo' : name, - 'pr_number' : pr_info['pr_src_number'], - 'values' : values } + """ + + values = ("repository","pullRequest","files") + params = { + 'owner' : owner, + 'repo' : name, + 'pr_number' : pr_info['pr_src_number'], + 'values' : values + } - try: - file_collection = GraphQlPageCollection(query, session.oauths, session.logger,bind=params) + try: + file_collection = GraphQlPageCollection(query, session.oauths, session.logger,bind=params) - pr_file_rows += [{ - 'pull_request_id': pr_info['pull_request_id'], - 'pr_file_additions': pr_file['additions'] if 'additions' in pr_file else None, - 'pr_file_deletions': pr_file['deletions'] if 'deletions' in pr_file else None, - 'pr_file_path': pr_file['path'], - 'data_source': 'GitHub API', - 'repo_id': repo_id, - } for pr_file in file_collection if pr_file and 'path' in pr_file] - except Exception as e: - logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") - logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) + pr_file_rows += [{ + 'pull_request_id': pr_info['pull_request_id'], + 'pr_file_additions': pr_file['additions'] if 'additions' in pr_file else None, + 'pr_file_deletions': pr_file['deletions'] if 'deletions' in pr_file else None, + 'pr_file_path': pr_file['path'], + 'data_source': 'GitHub API', + 'repo_id': repo_id, + } for pr_file in file_collection if pr_file and 'path' in pr_file] + except Exception as e: + logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") + logger.error( + ''.join(traceback.format_exception(None, e, e.__traceback__))) - if len(pr_file_rows) > 0: - #Execute a bulk upsert with sqlalchemy - pr_file_natural_keys = ["pull_request_id", "repo_id", "pr_file_path"] - session.insert_data(pr_file_rows, PullRequestFile, pr_file_natural_keys) + if 
len(pr_file_rows) > 0: + #Execute a bulk upsert with sqlalchemy + pr_file_natural_keys = ["pull_request_id", "repo_id", "pr_file_path"] + session.insert_data(pr_file_rows, PullRequestFile, pr_file_natural_keys) diff --git a/augur/tasks/github/pull_requests/files_model/tasks.py b/augur/tasks/github/pull_requests/files_model/tasks.py index 813f71116c..c3105881bf 100644 --- a/augur/tasks/github/pull_requests/files_model/tasks.py +++ b/augur/tasks/github/pull_requests/files_model/tasks.py @@ -2,11 +2,14 @@ import traceback from augur.application.db.session import DatabaseSession from augur.tasks.github.pull_requests.files_model.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query -@celery.task +@celery.task() def process_pull_request_files(repo_git_identifiers: str) -> None: + + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(process_pull_request_files.__name__) with DatabaseSession(logger, engine) as session: diff --git a/augur/tasks/github/pull_requests/tasks.py b/augur/tasks/github/pull_requests/tasks.py index 9ec691595b..0e02e9c2ca 100644 --- a/augur/tasks/github/pull_requests/tasks.py +++ b/augur/tasks/github/pull_requests/tasks.py @@ -3,7 +3,7 @@ import traceback from augur.tasks.github.pull_requests.core import extract_data_from_pr_list -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.data_parse import * from augur.tasks.github.util.github_paginator import GithubPaginator, hit_api from augur.tasks.github.util.github_task_session import GithubTaskSession @@ -20,6 +20,10 @@ @celery.task def collect_pull_requests(repo_git_identifiers: [str]) -> None: + from augur.tasks.init.celery_app import engine + + print(f"Eventlet engine id: {id(engine)}") + logger = logging.getLogger(collect_pull_requests.__name__) with DatabaseSession(logger, engine) as session: @@ -45,6 +49,10 @@ def collect_pull_requests(repo_git_identifiers: [str]) -> None: # TODO: Fix column names in pull request labels table def retrieve_all_pr_data(repo_git: str, logger) -> None: + from augur.tasks.init.celery_app import engine + + print(f"Eventlet engine id: {id(engine)}") + owner, repo = get_owner_repo(repo_git) # define GithubTaskSession to handle insertions, and store oauth keys @@ -80,6 +88,8 @@ def retrieve_all_pr_data(repo_git: str, logger) -> None: def process_pull_requests(pull_requests, task_name, repo_id, logger): + from augur.tasks.init.celery_app import engine + tool_source = "Pr Task" tool_version = "2.0" data_source = "Github API" @@ -214,6 +224,8 @@ def process_pull_requests(pull_requests, task_name, repo_id, logger): @celery.task def pull_request_review_comments(repo_git: str) -> None: + + from augur.tasks.init.celery_app import engine owner, repo = get_owner_repo(repo_git) @@ -308,6 +320,8 @@ def pull_request_review_comments(repo_git: str) -> None: @celery.task def pull_request_reviews(repo_git: str, pr_number_list: [int]) -> None: + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(pull_request_reviews.__name__) owner, repo = get_owner_repo(repo_git) diff --git a/augur/tasks/github/releases/tasks.py b/augur/tasks/github/releases/tasks.py index a71f3da480..b4aa912c50 100644 --- a/augur/tasks/github/releases/tasks.py +++ b/augur/tasks/github/releases/tasks.py @@ -1,12 +1,14 @@ from 
augur.tasks.github.util.github_task_session import GithubTaskSession from augur.tasks.github.releases.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query import traceback @celery.task def collect_releases(): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_releases.__name__) with GithubTaskSession(logger, engine) as session: query = session.query(Repo) diff --git a/augur/tasks/github/repo_info/tasks.py b/augur/tasks/github/repo_info/tasks.py index 37287542c7..b4bde898fb 100644 --- a/augur/tasks/github/repo_info/tasks.py +++ b/augur/tasks/github/repo_info/tasks.py @@ -1,13 +1,15 @@ from augur.tasks.github.util.github_task_session import GithubTaskSession from augur.application.db.session import DatabaseSession from augur.tasks.github.repo_info.core import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from augur.application.db.util import execute_session_query import traceback @celery.task def collect_repo_info(repo_git_identifiers: [str]): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(collect_repo_info.__name__) with GithubTaskSession(logger, engine) as session: diff --git a/augur/tasks/github/util/github_random_key_auth.py b/augur/tasks/github/util/github_random_key_auth.py index 3a1e8bec00..56933ab633 100644 --- a/augur/tasks/github/util/github_random_key_auth.py +++ b/augur/tasks/github/util/github_random_key_auth.py @@ -3,7 +3,6 @@ from augur.tasks.util.random_key_auth import RandomKeyAuth from augur.tasks.github.util.github_api_key_handler import GithubApiKeyHandler from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine class GithubRandomKeyAuth(RandomKeyAuth): diff --git a/augur/tasks/init/__init__.py b/augur/tasks/init/__init__.py index eb590a99ab..b58dcdee12 100644 --- a/augur/tasks/init/__init__.py +++ b/augur/tasks/init/__init__.py @@ -1,13 +1,14 @@ import logging from augur.application.db.session import DatabaseSession +from augur.application.db.engine import DatabaseEngine from augur.application.config import AugurConfig def get_redis_conn_values(): logger = logging.getLogger(__name__) - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: config = AugurConfig(logger, session) @@ -22,7 +23,7 @@ def get_redis_conn_values(): def get_rabbitmq_conn_string(): logger = logging.getLogger(__name__) - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: config = AugurConfig(logger, session) rabbbitmq_conn_string = config.get_value("RabbitMQ", "connection_string") diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 166b4303e5..0e13896df4 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -1,5 +1,5 @@ """Defines the Celery app.""" -from celery.signals import worker_process_init, worker_process_shutdown +from celery.signals import worker_process_init, worker_process_shutdown, eventlet_pool_started, eventlet_pool_preshutdown, eventlet_pool_postshutdown import logging from typing import List, Dict import os @@ -11,6 +11,7 @@ from augur.application.logs import TaskLogConfig from augur.application.db.session import DatabaseSession +from 
augur.application.db.engine import DatabaseEngine from augur.application.config import AugurConfig from augur.application.db.engine import get_database_string from augur.tasks.init import get_redis_conn_values, get_rabbitmq_conn_string @@ -56,9 +57,9 @@ celery_app = Celery('tasks', broker=BROKER_URL, backend=BACKEND_URL, include=tasks) # define the queues that tasks will be put in (by default tasks are put in celery queue) -celery_app.conf.task_routes = { - 'augur.tasks.git.facade_tasks.*': {'queue': 'cpu'} -} +# celery_app.conf.task_routes = { +# 'augur.tasks.start_tasks.*': {'queue': 'scheduling'} +# } #Setting to be able to see more detailed states of running tasks celery_app.conf.task_track_started = True @@ -116,8 +117,10 @@ def setup_periodic_tasks(sender, **kwargs): The tasks so that they are grouped by the module they are defined in """ from augur.tasks.start_tasks import start_task + from augur.tasks.start_tasks import non_repo_domain_tasks - with DatabaseSession(logger) as session: + + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: config = AugurConfig(logger, session) @@ -145,7 +148,7 @@ def init_worker(**kwargs): from augur.application.db.engine import DatabaseEngine - engine = DatabaseEngine().engine + engine = DatabaseEngine(pool_size=10, max_overflow=20, pool_timeout=240).engine @worker_process_shutdown.connect @@ -155,3 +158,4 @@ def shutdown_worker(**kwargs): logger.info('Closing database connectionn for worker') engine.dispose() + diff --git a/augur/tasks/init/celery_worker.py b/augur/tasks/init/celery_worker.py new file mode 100644 index 0000000000..c9a76569a7 --- /dev/null +++ b/augur/tasks/init/celery_worker.py @@ -0,0 +1,4 @@ +from celery.signals import worker_process_init, worker_process_shutdown + +print("Celery worker") + diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 2dae9d8f8c..f722eb770a 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -24,12 +24,12 @@ from augur.tasks.git.facade_tasks import * from augur.tasks.db.refresh_materialized_views import * # from augur.tasks.data_analysis import * -from augur.tasks.init.celery_app import celery_app as celery, engine +from augur.tasks.init.celery_app import celery_app as celery from celery.result import allow_join_result from augur.application.logs import AugurLogger from augur.application.config import AugurConfig from augur.application.db.session import DatabaseSession -from augur.tasks.init.celery_app import engine +from augur.application.db.engine import DatabaseEngine from augur.application.db.util import execute_session_query from logging import Logger @@ -42,8 +42,8 @@ def prelim_phase(): logger = logging.getLogger(prelim_phase.__name__) - - with DatabaseSession(logger) as session: + job = None + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') repo_git_list = [repo.repo_git for repo in repos] @@ -65,11 +65,10 @@ def repo_collect_phase(): np_clustered_array = [] #A chain is needed for each repo. - with DatabaseSession(logger) as session: + with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: query = session.query(Repo) repos = execute_session_query(query, 'all') - all_repo_git_identifiers = [repo.repo_git for repo in repos] #Cluster each repo in groups of 80. 
np_clustered_array = np.array_split(all_repo_git_identifiers,math.ceil(len(all_repo_git_identifiers)/80)) @@ -205,6 +204,8 @@ def start_data_collection(self): @celery.task def start_task(): + from augur.tasks.init.celery_app import engine + logger = logging.getLogger(start_task.__name__) #Get phase options from the config From e5fe29a6b3c5d3e5b3630bbae7cd91e1d0499349 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 2 Feb 2023 22:08:50 -0600 Subject: [PATCH 086/134] Dev facade fixes (#2165) * facade fixes Signed-off-by: Isaac Milarsky * session close Signed-off-by: Isaac Milarsky --------- Signed-off-by: Isaac Milarsky Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 17 ++++++++--------- .../facade_worker/facade07rebuildcache.py | 5 +++-- augur/tasks/github/facade_github/tasks.py | 5 ++--- augur/tasks/start_tasks.py | 2 +- 4 files changed, 14 insertions(+), 15 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 8ba559b4d0..258840be5c 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -350,10 +350,9 @@ def nuke_affiliations_facade_task(): from augur.tasks.init.celery_app import engine logger = logging.getLogger(nuke_affiliations_facade_task.__name__) - # TODO: Is this session ever closed? - session = FacadeSession(logger) - - nuke_affiliations(session) + + with FacadeSession(logger) as session: + nuke_affiliations(session) @celery.task def fill_empty_affiliations_facade_task(): @@ -515,7 +514,7 @@ def generate_contributor_sequence(logger,repo_git_identifiers): -def generate_facade_chain(logger,repo_git_identifiers): +def generate_facade_chain(logger,repo_git_identifiers, firstRun=False): #raise NotImplemented logger.info("Generating facade sequence") @@ -567,17 +566,17 @@ def generate_facade_chain(logger,repo_git_identifiers): #Generate contributor analysis task group. 
facade_sequence.append(generate_contributor_sequence(logger,repo_git_identifiers)) - if nuke_stored_affiliations: + if nuke_stored_affiliations and firstRun: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) #session.logger.info(session.cfg) - if not limited_run or (limited_run and fix_affiliations): + if not limited_run or (limited_run and fix_affiliations) and firstRun: facade_sequence.append(fill_empty_affiliations_facade_task.si().on_error(facade_error_handler.s()))#fill_empty_affiliations(session) - if force_invalidate_caches: + if force_invalidate_caches and firstRun: facade_sequence.append(invalidate_caches_facade_task.si().on_error(facade_error_handler.s()))#invalidate_caches(session.cfg) - if not limited_run or (limited_run and rebuild_caches): + if not limited_run or (limited_run and rebuild_caches) and firstRun: facade_sequence.append(rebuild_unknown_affiliation_and_web_caches_facade_task.si().on_error(facade_error_handler.s()))#rebuild_unknown_affiliation_and_web_caches(session.cfg) logger.info(f"Facade sequence: {facade_sequence}") diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py b/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py index 385de4dc36..252f82b0cd 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py @@ -43,7 +43,7 @@ # else: # import MySQLdb -def nuke_affiliations(session): +def nuke_affiliations(session, repo_id_list): # Delete all stored affiliations in the database. Normally when you # add/remove/change affiliation data via the web UI, any potentially affected @@ -56,7 +56,8 @@ def nuke_affiliations(session): session.log_activity('Info','Nuking affiliations') nuke = s.sql.text("""UPDATE commits SET cmt_author_affiliation = NULL, - cmt_committer_affiliation = NULL""") + cmt_committer_affiliation = NULL + WHERE repo_id IN :values""").bindparams(values=tuple(repo_id_list)) session.execute_sql(nuke) diff --git a/augur/tasks/github/facade_github/tasks.py b/augur/tasks/github/facade_github/tasks.py index 11fd9484c3..9e5ea65ba2 100644 --- a/augur/tasks/github/facade_github/tasks.py +++ b/augur/tasks/github/facade_github/tasks.py @@ -210,10 +210,9 @@ def link_commits_to_contributor(session,contributorQueue): query = s.sql.text(""" UPDATE commits SET cmt_ght_author_id=:cntrb_id - WHERE cmt_committer_email=:cntrb_email - OR cmt_author_raw_email=:cntrb_email + WHERE + cmt_author_raw_email=:cntrb_email OR cmt_author_email=:cntrb_email - OR cmt_committer_raw_email=:cntrb_email """).bindparams(cntrb_id=cntrb["cntrb_id"],cntrb_email=cntrb["email"]) #engine.execute(query, **data) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index f722eb770a..9e41015a82 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -96,7 +96,7 @@ def repo_collect_phase(): repo_task_group = group( *repo_info_tasks, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), - chain(generate_facade_chain(logger,first_pass),create_grouped_task_load(dataList=first_pass,task=process_dependency_metrics)), + chain(generate_facade_chain(logger,first_pass,firstRun=True),create_grouped_task_load(dataList=first_pass,task=process_dependency_metrics)), collect_releases.si() ) From 3c56b2743f3c00e7c38e79422d9c8d60289514ac Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 2 Feb 2023 22:09:26 -0600 Subject: [PATCH 
087/134] apply exponential backoff to dev (#2166) * apply exponential backoff to dev Signed-off-by: Isaac Milarsky * patch insert_or_update_data Signed-off-by: Isaac Milarsky --------- Signed-off-by: Isaac Milarsky --- augur/application/db/util.py | 6 +++++- augur/tasks/data_analysis/insight_worker/tasks.py | 2 +- .../git/util/facade_worker/facade_worker/facade01config.py | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/augur/application/db/util.py b/augur/application/db/util.py index 153b3e2deb..9fa49ab00d 100644 --- a/augur/application/db/util.py +++ b/augur/application/db/util.py @@ -7,12 +7,16 @@ def catch_operational_error(func): attempts = 0 error = None + timeout = 240 + while attempts < 4: # do the sleep here instead of instead of in the exception # so it doesn't sleep after the last failed time if attempts > 0: - time.sleep(240) + #Do a 30% exponential backoff + time.sleep(timeout) + timeout = int(timeout * 1.3) try: return func() except OperationalError as e: diff --git a/augur/tasks/data_analysis/insight_worker/tasks.py b/augur/tasks/data_analysis/insight_worker/tasks.py index aefc849744..334f67510d 100644 --- a/augur/tasks/data_analysis/insight_worker/tasks.py +++ b/augur/tasks/data_analysis/insight_worker/tasks.py @@ -568,7 +568,7 @@ def clear_insights(repo_id, new_endpoint, new_field, logger): AND ri_field = '{}' """.format(repo_id, new_endpoint, new_field) try: - with DatabaseEngine(connection_pool_size=1) as engine: + with DatabaseEngine(1) as engine: result = engine.execute(deleteSQL) except Exception as e: logger.info("Error occured deleting insight slot: {}".format(e)) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py index ca58994411..89aba0f9a5 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py @@ -223,7 +223,7 @@ def insert_or_update_data(self, query, **bind_args)-> None: if isinstance(e.orig, DeadlockDetected): deadlock_detected = True sleep_time = random.choice(sleep_time_list) - self.logger.debug(f"Deadlock detected on {table.__table__} table...trying again in {round(sleep_time)} seconds: transaction size: {len(data)}") + self.logger.debug(f"Deadlock detected on query {query}...trying again in {round(sleep_time)} seconds: transaction size: {len(data)}") time.sleep(sleep_time) attempts += 1 From cf59727df09f79162a75c8771ca930689355dd77 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 3 Feb 2023 05:57:14 -0600 Subject: [PATCH 088/134] Update new-install.md Updated `dev` branch instructions for installation and configuration. --- docs/new-install.md | 178 ++++++++++++++++++++++++++++---------------- 1 file changed, 115 insertions(+), 63 deletions(-) diff --git a/docs/new-install.md b/docs/new-install.md index 2c6586fff2..40c5659407 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -1,35 +1,123 @@ ## Augur Setup -## General Pre-Requisites +# Ubuntu 20.04.x +We default to this version of Ubuntu for the moment because Augur does not yet support python3.10, which is the default version of python3.x distributed with Ubuntu 22.0x.x + +## Git Platform Requirements (Things to have setup prior to initiating installation.) +1. Obtain a GitHub Access Token: https://github.com/settings/tokens +2. Obtain a GitLab Access Token: https://gitlab.com/-/profile/personal_access_tokens + ### Fork and Clone Augur 1. 
Fork https://github.com/chaoss/augur 2. Clone your fork. We recommend creating a `github` directory in your user's base directory. -### Frontend (Optional) -1. Install NodeJS (Optional if only running the backend) `sudo apt update` and `sudo apt install nodejs` -2. Install NPM (Optional if only running the backend) `sudo apt install npm` -3. Install nvm (node version manager: recommended) `sudo apt install curl` and `curl https://raw.githubusercontent.com/creationix/nvm/master/install.sh | bash` -3. Install vue.js (Optional if only running the backend) -4. Install vue-cli (Optional if only running the backend) +## Pre-Requisite Operating System Level Packages +Here we ensure your system is up to date, install required python libraries, install postgresql, and install our queuing infrastrucutre, which is composed of redis-server and rabbitmq-server +```shell +sudo apt update && +sudo apt upgrade && +sudo apt install software-properties-common && +sudo apt install python3-dev && +sudo apt install python3.8-venv && +sudo apt install postgresql postgresql-contrib postgresql-client && +sudo apt install build-essential && +sudo apt install redis-server && # required +sudo apt install erlang && # required +sudo apt install rabbitmq-server && #required +sudo snap install go --classic && #required: Go Needs to be version 1.19.x or higher. Snap is the package manager that gets you to the right version. Classic enables it to actually be installed at the correct version. +sudo apt install nginx && # required for hosting +sudo apt install firefox-geckodriver # required for visualization API +``` -### Backend (Ubuntu instructions, use Linux Distro Appropriate Package Manager Otherwise) -1. Obtain a GitHub Access Token: https://github.com/settings/tokens -2. Obtain a GitLab Access Token: https://gitlab.com/-/profile/personal_access_tokens -3. Python3 needs to be installed, and typically is by default on most systems. -4. Make sure all of your core libraries are installed at the operating system level. Often, these days, they are by default, and its important to make sure: -```bash - sudo apt update - sudo apt upgrade - sudo apt install software-properties-common - sudo apt install python3-dev - sudo apt install postgresql postgresql-contrib postgresql-client - sudo apt install build-essential +## Git Configuration +There are some Git configuration parameters that help when you are cloning repos over time, and a platform prompts you for credentials when it finds a repo is deleted: +```shell + git config --global diff.renames true + git config --global diff.renameLimit 200000 + git config --global credential.helper cache + git config --global credential.helper 'cache --timeout=9999999999999' ``` -5. If you are running on Ubuntu 22.x right now, you will need to install python 3.9 - - `sudo add-apt-repository ppa:deadsnakes/ppa` - - `sudo apt install python3.9` - - `sudo apt install python3.9-distutils` -6. Install pip: `sudo apt install python3-pip` and `sudo apt install python3.9-venv` + + +## Postgresql Configuration +Create a PostgreSQL database for Augur to use +```shell +sudo su - +su - postgres +psql +``` + +Then, from within the resulting postgresql shell: +```sql +CREATE DATABASE augur; +CREATE USER augur WITH ENCRYPTED PASSWORD 'password'; +GRANT ALL PRIVILEGES ON DATABASE augur TO augur; +``` + +Once you are successfully logged out, return to your user by exiting `psql`, then typing `exit` to exit the postgres user, and `exit` a SECOND time to exit the root user. 
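Once you are back at your own shell (the `\quit` and `exit` sequence shown next), it can help to confirm the new role and database are reachable before moving on. This is only an optional sanity check, and it assumes the example password (`password`) used in the SQL above:

```shell
# Optional sanity check: connect as the new augur role over localhost and print connection info.
# Assumes the example password from the CREATE USER statement above.
PGPASSWORD=password psql -h localhost -U augur -d augur -c '\conninfo'
```

If the connection is refused, verify postgresql is running (`sudo systemctl status postgresql`) before continuing.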
+``` +postgres=# \quit +``` + +```shell +exit +exit +``` + +## Rabbitmq Broker Configuration +You have to setup a specific user, and broker host for your augur instance. You can accomplish this by running the below commands: +``` +sudo rabbitmqctl add_user augur password123 +sudo rabbitmqctl add_vhost augur_vhost +sudo rabbitmqctl set_user_tags augur augurTag +sudo rabbitmqctl set_permissions -p augur_vhost augur ".*" ".*" ".*" +``` + +NOTE: it is important to have a static hostname when using rabbitmq as it uses hostname to communicate with nodes. + +If your setup of rabbitmq is successful your broker url should look like this: + +**`broker_url = 'amqp://augur:password123@localhost:5672/augur_vhost'`** + +**During Augur installation, you will be prompted for this broker_url** + + + +## Installing and Configuring Augur! +Create a Python Virtual Environment `python3 -m venv ~/virtual-env-directory` + +Activate your Python Virtual Environment `source ~/virtual-env-directory/bin/activate` + +From the root of the Augur Directory, type `make install` + +You will be prompted to provide your GitHub username and password, your GitLab username and password, and the postgresql database where you want to have the Augur Schema built. You will also be prompted to provide a directory where repositories will be clone into. + +## Post Installation of Augur + +### Redis Broker Configuration +If applications other than Augur are running on the same server, and using `redis-server` it is important to ensure that Augur and these other applications (or additional instances of Augur) are using distinct "cache_group". You can change from the default value of zero by editing the `augur_operations.config` table directly, looking for the "Redis" section_name, and the "cache_group" setting_name. This SQL is also a template: +```sql +UPDATE augur_operations.config +SET value = 2 +WHERE +section_name='Redis' +AND +setting_name='cache_group'; +``` + +## Augur Commands + +To access command line options, use `augur --help`. To load repos from GitHub organizations prior to collection, or in other ways, the direct route is `augur db --help`. + +Start a Flower Dashboard, which you can use to monitor progress, and report any failed processes as issues on the Augur GitHub site. The error rate for tasks is currently 0.04%, and most errors involve unhandled platform API timeouts. We continue to identify and add fixes to handle these errors through additional retries. Starting Flower: `(nohup celery -A augur.tasks.init.celery_app.celery_app flower --port=8400 --max-tasks=1000000 &)` NOTE: You can use any open port on your server, and access the dashboard in a browser with http://servername-or-ip:8400 in the example above (assuming you have access to that port, and its open on your network.) + +## Starting your Augur Instance +Start Augur: `(nohup augur backend start &)` + +When data collection is complete you will see only a single task running in your flower Dashboard. + +## Stopping your Augur Instance +You can stop augur with `augur backend stop`, followed by `augur backend kill`. We recommend waiting 5 minutes between commands so Augur can shutdown more gently. There is no issue with data integrity if you issue them seconds apart, its just that stopping is nicer than killing. ### Docker 1. Make sure docker, and docker-compose are both installed @@ -41,44 +129,8 @@ 5. 
`sudo docker-compose --env-file ./environment.txt --file docker-compose.yml up` to run the database in a Docker Container or `sudo docker-compose --env-file ./environment.txt --file docker-compose.yml up` to connect to an already running database. -### Regular Installation -0. Follow the installation instructions for the database here: https://oss-augur.readthedocs.io/en/main/quick-start.html#postgresql-installation -1. Clone Augur, or clone your fork of Augur if you wish to make contributions -2. Install `redis-server` at the operating system level `sudo apt install redis-server` -3. Install rabbitmq. Instructions for installing rabbitmq can be found here: https://www.rabbitmq.com/download.html - -NOTE: To set up the rabbitmq instance and get it working see the quick-start section before running make install - -3. Make sure you have `Go` version is 1.19.3. If you don't know how to install `Go`, instructions are provided during the installation process. After following the instructions, you will need to add Go to your path for this session: `export PATH=$PATH:/usr/local/go/bin`. You should also add this to your shell's profile script. -4. Create a Python Virtual Environment `python3 -m venv ~/virtual-env-directory` (use `python3.9 -m venv` if on Ubuntu 22.04, as it defaults to python 3.10, which will not compile the machine learning workers.) -5. Activate your Python Virtual Environment `source ~/virtual-env-directory/bin/activate` -6. From the root of the Augur Directory, type `make install` -7. You will be prompted to provide your GitHub username and password, your GitLab username and password, and the postgresql database where you want to have the Augur Schema built. You will also be prompted to provide a directory where repositories will be clone into. -8. To access command line options, use `augur --help`. To load repos from GitHub organizations prior to collection, or in other ways, the direct route is `augur db --help`. -9. Start a Flower Dashboard, which you can use to monitor progress, and report any failed processes as issues on the Augur GitHub site. The error rate for tasks is currently 0.04%, and most errors involve unhandled platform API timeouts. We continue to identify and add fixes to handle these errors through additional retries. Starting Flower: `(nohup celery -A augur.tasks.init.celery_app.celery_app flower --port=8400 --max-tasks=1000000 &)` NOTE: You can use any open port on your server, and access the dashboard in a browser with http://servername-or-ip:8400 in the example above (assuming you have access to that port, and its open on your network.) -10. Start Augur: `(nohup augur backend start &)` -11. When data collection is complete you will see only a single task running in your flower Dashboard. -12. You can stop augur with `augur backend stop`, followed by `augur backend kill`. We recommend waiting 5 minutes between commands so Augur can shutdown more gently. There is no issue with data integrity if you issue them seconds apart, its just that stopping is nicer than killing. -13. If you wish to run the frontend, create a file called `frontend/frontend.config.json following this structure, with relevant values`: (Default port is 5000. This can be changed in the `augur_operations.config` table.). Then run `npm install` and `npm run build` in the frontend directory. `npm run serve will make a development server (usually on your local machine)` version of the frontend available. 
If you wish to run Augur's frontend through nginx, you can follow these instructions here: https://oss-augur.readthedocs.io/en/augur-new/deployment/nginx-configuration.html?highlight=nginx#nginx and here: https://oss-augur.readthedocs.io/en/augur-new/deployment/nginx-configuration.html?highlight=nginx#site-configuration -NOTE: `"host": "your resolvable server"` - ```json - { - "Frontend": { - "host": "chaoss.tv", - "port": 5000, - "ssl": false - }, - "Server": { - "cache_expire": "3600", - "host": "0.0.0.0", - "port": 5000, - "workers": 6, - "timeout": 6000, - "ssl": false, - "ssl_cert_file": null, - "ssl_key_file": null - } - } - ``` +### Errata (Old Frontend) + 14. If you have frontend configuration issues that result in a *failure* to complete steps with npm, we recommend you install and use `nvm`: https://tecadmin.net/how-to-install-nvm-on-ubuntu-20-04/ to set your nodejs release to the latest LTS of 12.x or 16.x. For example: `nvm ls-remote | grep -i 'latest'` and `nvm alias default 16.??` (whatever the latest version of 16 is.) 15. Also, please explore our new frontend, being developed at https://github.com/augurlabs/augur_view. The `dev` branch is the most current. + From 9f179d2057b65ebbb38c0bd8ff7d2c0570487e50 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 3 Feb 2023 09:42:26 -0600 Subject: [PATCH 089/134] Update new-install.md --- docs/new-install.md | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/docs/new-install.md b/docs/new-install.md index 40c5659407..99621798de 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -13,6 +13,25 @@ We default to this version of Ubuntu for the moment because Augur does not yet s ## Pre-Requisite Operating System Level Packages Here we ensure your system is up to date, install required python libraries, install postgresql, and install our queuing infrastrucutre, which is composed of redis-server and rabbitmq-server + +### Executable +```shell +sudo apt update && +sudo apt upgrade && +sudo apt install software-properties-common && +sudo apt install python3-dev && +sudo apt install python3.8-venv && +sudo apt install postgresql postgresql-contrib postgresql-client && +sudo apt install build-essential && +sudo apt install redis-server && +sudo apt install erlang && +sudo apt install rabbitmq-server && +sudo snap install go --classic && +sudo apt install nginx && +sudo apt install firefox-geckodriver +``` + +### Annotated ```shell sudo apt update && sudo apt upgrade && From f8ad629de79ae089ee2a90f71af6ee106e7d2da7 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 3 Feb 2023 09:51:06 -0600 Subject: [PATCH 090/134] Update new-install.md --- docs/new-install.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/new-install.md b/docs/new-install.md index 99621798de..da35ea3fb3 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -85,7 +85,7 @@ exit ## Rabbitmq Broker Configuration You have to setup a specific user, and broker host for your augur instance. You can accomplish this by running the below commands: -``` +```shell sudo rabbitmqctl add_user augur password123 sudo rabbitmqctl add_vhost augur_vhost sudo rabbitmqctl set_user_tags augur augurTag From 039e817faf62e412f55013571bcac52f01c81a55 Mon Sep 17 00:00:00 2001 From: "Sean P. 
Goggins" Date: Fri, 3 Feb 2023 09:52:25 -0600 Subject: [PATCH 091/134] Update new-install.md --- docs/new-install.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/new-install.md b/docs/new-install.md index da35ea3fb3..e6920b8dcc 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -86,9 +86,9 @@ exit ## Rabbitmq Broker Configuration You have to setup a specific user, and broker host for your augur instance. You can accomplish this by running the below commands: ```shell -sudo rabbitmqctl add_user augur password123 -sudo rabbitmqctl add_vhost augur_vhost -sudo rabbitmqctl set_user_tags augur augurTag +sudo rabbitmqctl add_user augur password123 || +sudo rabbitmqctl add_vhost augur_vhost && +sudo rabbitmqctl set_user_tags augur augurTag && sudo rabbitmqctl set_permissions -p augur_vhost augur ".*" ".*" ".*" ``` From ff69eb60786d2efc9a9a18262e13aed53e6f1c53 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 3 Feb 2023 10:01:41 -0600 Subject: [PATCH 092/134] Update new-install.md --- docs/new-install.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/new-install.md b/docs/new-install.md index e6920b8dcc..ace3e981d2 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -96,7 +96,7 @@ NOTE: it is important to have a static hostname when using rabbitmq as it uses h If your setup of rabbitmq is successful your broker url should look like this: -**`broker_url = 'amqp://augur:password123@localhost:5672/augur_vhost'`** +**broker_url = `amqp://augur:password123@localhost:5672/augur_vhost`** **During Augur installation, you will be prompted for this broker_url** From 5afc66f2e5543608466ef4e8136c86f2a35467f3 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 3 Feb 2023 10:23:58 -0600 Subject: [PATCH 093/134] Update new-install.md --- docs/new-install.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/new-install.md b/docs/new-install.md index ace3e981d2..55e230b3a8 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -135,6 +135,11 @@ Start Augur: `(nohup augur backend start &)` When data collection is complete you will see only a single task running in your flower Dashboard. +## Accessing Repo Addition and Visualization Front End +Your Augur intance will now be available at http://hostname.io:port_number + +For example: http://chaoss.tv:5038 + ## Stopping your Augur Instance You can stop augur with `augur backend stop`, followed by `augur backend kill`. We recommend waiting 5 minutes between commands so Augur can shutdown more gently. There is no issue with data integrity if you issue them seconds apart, its just that stopping is nicer than killing. From 9521e9df84fe58c5de6b3fc51e98b4ffdf6b9a6e Mon Sep 17 00:00:00 2001 From: "Sean P. 
Goggins" Date: Fri, 3 Feb 2023 10:26:05 -0600 Subject: [PATCH 094/134] Update new-install.md --- docs/new-install.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/new-install.md b/docs/new-install.md index 55e230b3a8..63f7a90f45 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -138,7 +138,9 @@ When data collection is complete you will see only a single task running in your ## Accessing Repo Addition and Visualization Front End Your Augur intance will now be available at http://hostname.io:port_number -For example: http://chaoss.tv:5038 +For example: http://chaoss.tv:5038 + +Note: Augur will run on port 5000 by default (you probably need to change that in augur_operations.config for OSX) ## Stopping your Augur Instance You can stop augur with `augur backend stop`, followed by `augur backend kill`. We recommend waiting 5 minutes between commands so Augur can shutdown more gently. There is no issue with data integrity if you issue them seconds apart, its just that stopping is nicer than killing. From 34dfb2ad390ea8e78dc2ea78154ec60d6a12c359 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 3 Feb 2023 10:31:10 -0600 Subject: [PATCH 095/134] Update new-install.md --- docs/new-install.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/new-install.md b/docs/new-install.md index 63f7a90f45..370be06a23 100644 --- a/docs/new-install.md +++ b/docs/new-install.md @@ -100,7 +100,30 @@ If your setup of rabbitmq is successful your broker url should look like this: **During Augur installation, you will be prompted for this broker_url** +## Proxying Augur through Nginx +Assumes nginx is installed. +Then you create a file for the server you want Augur to run under in the location of your `sites-enabled` directory for nginx (In this example, Augur is running on port 5038: + +``` +server { + listen 80; + server_name zoo.chaoss.tv; + + location /api/unstable/ { + proxy_pass http://zoo.chaoss.tv:5038; + proxy_set_header Host $host; + } + + location / { + proxy_pass http://127.0.0.1:5038; + } + + error_log /var/log/nginx/augurview.osshealth.error.log; + access_log /var/log/nginx/augurview.osshealth.access.log; + +} +``` ## Installing and Configuring Augur! Create a Python Virtual Environment `python3 -m venv ~/virtual-env-directory` From e3dffd3f0234483f2792c94a3886cee644d52853 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 6 Feb 2023 14:02:35 -0600 Subject: [PATCH 096/134] patch mismatched function call (#2169) Signed-off-by: Isaac Milarsky --- .../util/facade_worker/facade_worker/facade07rebuildcache.py | 5 ++--- augur/tasks/start_tasks.py | 3 +-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py b/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py index 252f82b0cd..385de4dc36 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade07rebuildcache.py @@ -43,7 +43,7 @@ # else: # import MySQLdb -def nuke_affiliations(session, repo_id_list): +def nuke_affiliations(session): # Delete all stored affiliations in the database. 
Normally when you # add/remove/change affiliation data via the web UI, any potentially affected @@ -56,8 +56,7 @@ def nuke_affiliations(session, repo_id_list): session.log_activity('Info','Nuking affiliations') nuke = s.sql.text("""UPDATE commits SET cmt_author_affiliation = NULL, - cmt_committer_affiliation = NULL - WHERE repo_id IN :values""").bindparams(values=tuple(repo_id_list)) + cmt_committer_affiliation = NULL""") session.execute_sql(nuke) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 9e41015a82..dfb10f5a6b 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -78,7 +78,7 @@ def repo_collect_phase(): logger.info(f"Scheduling groups of {len(first_pass)}") #Pool the tasks for collecting repo info. repo_info_tasks = create_grouped_task_load(dataList=first_pass, task=collect_repo_info).tasks - + #pool the repo collection jobs that should be ran first and have deps. primary_repo_jobs = group( *create_grouped_task_load(dataList=first_pass, task=collect_issues).tasks, @@ -91,7 +91,6 @@ def repo_collect_phase(): *create_grouped_task_load(dataList=first_pass, task=process_pull_request_files).tasks, *create_grouped_task_load(dataList=first_pass, task=process_pull_request_commits).tasks ) - repo_task_group = group( *repo_info_tasks, From 06e75c1af422497c0314f586b32edec8214d6760 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Mon, 6 Feb 2023 14:12:44 -0600 Subject: [PATCH 097/134] Simply flask app definition (#2168) * Simplify flask app Signed-off-by: Andrew Brain * Fix auggie routes' Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Update frontend with flask changes Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> * Fix various bugs Signed-off-by: Andrew Brain * Fix issues Signed-off-by: Andrew Brain * Update metadata.py * Update README.md --------- Signed-off-by: Andrew Brain Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Co-authored-by: Sean P. 
Goggins --- README.md | 4 +- augur/api/metrics/commit.py | 4 +- augur/api/metrics/contributor.py | 4 +- augur/api/metrics/deps.py | 3 +- augur/api/metrics/insight.py | 3 +- augur/api/metrics/issue.py | 3 +- augur/api/metrics/message.py | 3 +- augur/api/metrics/pull_request.py | 4 +- augur/api/metrics/release.py | 3 +- augur/api/metrics/repo_meta.py | 14 +- augur/api/metrics/toss.py | 3 +- augur/api/routes/__init__.py | 10 + augur/api/routes/auggie.py | 356 +- augur/api/routes/batch.py | 270 +- augur/api/routes/collection_status.py | 293 +- augur/api/routes/config.py | 50 +- augur/api/routes/contributor_reports.py | 1940 +++++----- augur/api/routes/manager.py | 605 ++-- augur/api/routes/metadata.py | 131 +- augur/api/routes/nonstandard_metrics.py | 31 +- augur/api/routes/pull_request_reports.py | 3176 ++++++++--------- augur/api/routes/user.py | 681 ++-- augur/api/routes/util.py | 410 +-- augur/api/server.py | 655 ++-- augur/api/view/api.py | 260 +- augur/api/view/augur_view.py | 141 +- augur/api/view/routes.py | 614 ++-- augur/application/cli/backend.py | 10 +- metadata.py | 4 +- .../test_commit_routes_api.py | 18 +- .../test_contributor_routes_api.py | 8 +- .../test_issue_routes_api.py | 67 +- .../test_pull_request_routes_api.py | 6 +- .../test_repo_meta_routes_api.py | 34 +- 34 files changed, 4859 insertions(+), 4959 deletions(-) diff --git a/README.md b/README.md index 64830bc4f8..d8b8897040 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Augur NEW Release v0.43.10 +# Augur NEW Release v0.44.0 [![first-timers-only](https://img.shields.io/badge/first--timers--only-friendly-blue.svg?style=flat-square)](https://www.firsttimersonly.com/) We follow the [First Timers Only](https://www.firsttimersonly.com/) philosophy of tagging issues for first timers only, and walking one newcomer through the resolution process weekly. [You can find these issues tagged with "first timers only" on our issues list.](https://github.com/chaoss/augur/labels/first-timers-only). [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) [![Build Docker images](https://github.com/chaoss/augur/actions/workflows/build_docker.yml/badge.svg)](https://github.com/chaoss/augur/actions/workflows/build_docker.yml) [![Hits-of-Code](https://hitsofcode.com/github/chaoss/augur?branch=main)](https://hitsofcode.com/github/chaoss/augur/view?branch=main) [![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/2788/badge)](https://bestpractices.coreinfrastructure.org/projects/2788) @@ -6,7 +6,7 @@ ## NEW RELEASE ALERT! [If you want to jump right in, updated docker build/compose and bare metal installation instructions are available here](docs/new-install.md) -Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.43.10 +Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.44.0 - The `main` branch is a stable version of our new architecture, which features: - Dramatic improvement in the speed of large scale data collection (10,000+ repos). 
All data is obtained for 10k+ repos within a week - A new job management architecture that uses Celery and Redis to manage queues, and enables users to run a Flower job monitoring dashboard diff --git a/augur/api/metrics/commit.py b/augur/api/metrics/commit.py index 3bd509073e..c143cd9f6e 100644 --- a/augur/api/metrics/commit.py +++ b/augur/api/metrics/commit.py @@ -8,9 +8,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine - -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric() def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, period='month'): diff --git a/augur/api/metrics/contributor.py b/augur/api/metrics/contributor.py index f5932319db..7d255ecb46 100644 --- a/augur/api/metrics/contributor.py +++ b/augur/api/metrics/contributor.py @@ -9,9 +9,7 @@ from augur.api.util import register_metric import uuid -from augur.application.db.engine import DatabaseEngine - -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric() def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/deps.py b/augur/api/metrics/deps.py index ff264ed41c..deb5ac89fd 100644 --- a/augur/api/metrics/deps.py +++ b/augur/api/metrics/deps.py @@ -7,8 +7,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric() def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/insight.py b/augur/api/metrics/insight.py index d81b1cfa3f..874f656f75 100644 --- a/augur/api/metrics/insight.py +++ b/augur/api/metrics/insight.py @@ -7,8 +7,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric(type="repo_group_only") def top_insights(repo_group_id, num_repos=6): diff --git a/augur/api/metrics/issue.py b/augur/api/metrics/issue.py index b20342d667..72108bc20b 100644 --- a/augur/api/metrics/issue.py +++ b/augur/api/metrics/issue.py @@ -8,8 +8,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric() def issues_first_time_opened(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/message.py b/augur/api/metrics/message.py index 7b97397804..8c36c3a4c2 100644 --- a/augur/api/metrics/message.py +++ b/augur/api/metrics/message.py @@ -9,8 +9,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric() def repo_messages(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/pull_request.py b/augur/api/metrics/pull_request.py index fd2754afa8..9fbcc61757 100644 --- a/augur/api/metrics/pull_request.py +++ b/augur/api/metrics/pull_request.py @@ -8,9 +8,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import 
DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine - +from ..server import engine @register_metric() def pull_requests_merge_contributor_new(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/release.py b/augur/api/metrics/release.py index 08dfb2f0a7..60f7793652 100644 --- a/augur/api/metrics/release.py +++ b/augur/api/metrics/release.py @@ -8,8 +8,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric() def releases(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None): diff --git a/augur/api/metrics/repo_meta.py b/augur/api/metrics/repo_meta.py index b20d3421f1..ca4d9668e2 100644 --- a/augur/api/metrics/repo_meta.py +++ b/augur/api/metrics/repo_meta.py @@ -10,9 +10,7 @@ import logging from augur.api.util import register_metric - -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine logger = logging.getLogger("augur") @@ -800,8 +798,8 @@ def annual_lines_of_code_count_ranked_by_new_repo_in_repo_group(repo_group_id, r LIMIT 10 """) - results = pd.read_sql(cdRgNewrepRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, - "repo_id": repo_id, "calendar_year": calendar_year}) + results = pd.read_sql(cdRgNewrepRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, + "repo_id": repo_id, "calendar_year": calendar_year}) return results @register_metric() @@ -897,8 +895,8 @@ def annual_lines_of_code_count_ranked_by_repo_in_repo_group(repo_group_id, repo_ - results = pd.read_sql(cdRgTpRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, - "repo_id": repo_id}) + results = pd.read_sql(cdRgTpRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id, + "repo_id": repo_id}) return results @register_metric() @@ -951,7 +949,7 @@ def lines_of_code_commit_counts_by_calendar_year_grouped(repo_url, calendar_year """) - results = pd.read_sql(cdRepTpIntervalLocCommitsSQL, engine, params={"repourl": '%{}%'.format(repo_url), 'calendar_year': calendar_year}) + results = pd.read_sql(cdRepTpIntervalLocCommitsSQL, engine, params={"repourl": '%{}%'.format(repo_url), 'calendar_year': calendar_year}) return results @register_metric() diff --git a/augur/api/metrics/toss.py b/augur/api/metrics/toss.py index 71af07cd5a..122cb35679 100644 --- a/augur/api/metrics/toss.py +++ b/augur/api/metrics/toss.py @@ -4,8 +4,7 @@ import pandas as pd from augur.api.util import register_metric -from augur.application.db.engine import DatabaseEngine -engine = DatabaseEngine(connection_pool_size=1).engine +from ..server import engine @register_metric(type="toss") def toss_pull_request_acceptance_rate(repo_id, begin_date=None, end_date=None, group_by='week'): diff --git a/augur/api/routes/__init__.py b/augur/api/routes/__init__.py index f4cc69cb4e..953a92b02a 100644 --- a/augur/api/routes/__init__.py +++ b/augur/api/routes/__init__.py @@ -1 +1,11 @@ AUGUR_API_VERSION = 'api/unstable' + +from .batch import * +from .collection_status import * +from .config import * +from .contributor_reports import * +from .manager import * +from .nonstandard_metrics import * +from .pull_request_reports import * +from .user import * +from .util import * diff --git a/augur/api/routes/auggie.py b/augur/api/routes/auggie.py index 672ca64257..122e73ea3b 100644 --- 
a/augur/api/routes/auggie.py +++ b/augur/api/routes/auggie.py @@ -14,6 +14,8 @@ import requests import slack +from ..server import app + AUGUR_API_VERSION = 'api/unstable' @@ -242,109 +244,174 @@ # return filteredUser -def create_routes(server): +@app.route('/auggie/get_user', methods=['POST']) +def get_auggie_user(): + # arg = [request.json] + # response = server.transform(metrics.get_auggie_user, args=arg) + # return Response(response=response, status=200, mimetype="application/json") + ## From Method + profile_name = 'augur' + if os.environ.get('AUGUR_IS_PROD'): + profile_name = 'default' + client = boto3.Session(region_name='us-east-1', profile_name=profile_name).client('dynamodb') + response = client.get_item( + TableName="auggie-users", + Key={ + "email": {"S":'{}:{}'.format(body["email"],body["teamID"])} + } + ) + user = response['Item'] + + filteredUser = { + "interestedRepos":user["interestedRepos"], + "interestedGroups":user["interestedGroups"], + "host":user["host"] + } + + return filteredUser + +@app.route('/auggie/update_tracking', methods=['POST']) +def update_auggie_user_tracking(): + # arg = [request.json] + # response = server.transform(metrics.update_tracking, args=arg) + # return Response(response=response, status=200, mimetype="application/json") + ## From Method + profile_name = 'augur' + if os.environ.get('AUGUR_IS_PROD'): + profile_name = 'default' + client = boto3.Session(region_name='us-east-1', profile_name=profile_name).client('dynamodb') + response = client.update_item( + TableName="auggie-users", + Key={ + "email": {"S": '{}:{}'.format(body["email"], body["teamID"])} + }, + UpdateExpression="SET interestedGroups = :valGroup, interestedRepos = :valRepo, maxMessages = :valMax, host = :valHost, interestedInsightTypes = :valInterestedInsights", + ExpressionAttributeValues={ + ":valGroup": { + "L": body["groups"] + }, + ":valRepo": { + "L": body["repos"] + }, + ":valMax": { + "N": body["maxMessages"] + }, + ":valHost": { + "S": body["host"] + }, + ":valInterestedInsights": { + "L": body["insightTypes"] + } + }, + ReturnValues="ALL_NEW" + ) + + updated_values = response['Attributes'] + + filtered_values = { + "interestedRepos": updated_values["interestedRepos"], + "interestedGroups": updated_values["interestedGroups"], + "host": updated_values["host"] + } + + return filtered_values + +@app.route('/auggie/slack_login', methods=['POST']) +def slack_login(): + # arg = [request.json] + # response = server.transform(metrics.slack_login, args=arg) + # return Response(response=response, status=200, mimetype="application/json") + # From Method + print("slack_login") + + r = requests.get( + url=f'https://slack.com/api/oauth.v2.access?code={body["code"]}&client_id={os.environ["AUGGIE_CLIENT_ID"]}&client_secret={os.environ["AUGGIE_CLIENT_SECRET"]}&redirect_uri=http%3A%2F%2Flocalhost%3A8080') + data = r.json() + + if (data["ok"]): + print(data) + token = data["authed_user"]["access_token"] + team_id = data["team"]["id"] + webclient = slack.WebClient(token=token) + + user_response = webclient.users_identity() + print(user_response) + email = user_response["user"]["email"] - @server.app.route('/auggie/get_user', methods=['POST']) - def get_auggie_user(): - # arg = [request.json] - # response = server.transform(metrics.get_auggie_user, args=arg) - # return Response(response=response, status=200, mimetype="application/json") - ## From Method profile_name = 'augur' if os.environ.get('AUGUR_IS_PROD'): profile_name = 'default' - client = boto3.Session(region_name='us-east-1', 
profile_name=profile_name).client('dynamodb') + print("Making Boto3 Session") + client = boto3.Session(region_name='us-east-1', + profile_name=profile_name).client('dynamodb') response = client.get_item( - TableName="auggie-users", - Key={ - "email": {"S":'{}:{}'.format(body["email"],body["teamID"])} - } - ) - user = response['Item'] - - filteredUser = { - "interestedRepos":user["interestedRepos"], - "interestedGroups":user["interestedGroups"], - "host":user["host"] - } - - return filteredUser - - @server.app.route('/auggie/update_tracking', methods=['POST']) - def update_auggie_user_tracking(): - # arg = [request.json] - # response = server.transform(metrics.update_tracking, args=arg) - # return Response(response=response, status=200, mimetype="application/json") - ## From Method - profile_name = 'augur' - if os.environ.get('AUGUR_IS_PROD'): - profile_name = 'default' - client = boto3.Session(region_name='us-east-1', profile_name=profile_name).client('dynamodb') - response = client.update_item( TableName="auggie-users", Key={ - "email": {"S": '{}:{}'.format(body["email"], body["teamID"])} - }, - UpdateExpression="SET interestedGroups = :valGroup, interestedRepos = :valRepo, maxMessages = :valMax, host = :valHost, interestedInsightTypes = :valInterestedInsights", - ExpressionAttributeValues={ - ":valGroup": { - "L": body["groups"] - }, - ":valRepo": { - "L": body["repos"] - }, - ":valMax": { - "N": body["maxMessages"] - }, - ":valHost": { - "S": body["host"] - }, - ":valInterestedInsights": { - "L": body["insightTypes"] - } - }, - ReturnValues="ALL_NEW" + "email": {"S": '{}:{}'.format(email, team_id)} + } ) - updated_values = response['Attributes'] - - filtered_values = { - "interestedRepos": updated_values["interestedRepos"], - "interestedGroups": updated_values["interestedGroups"], - "host": updated_values["host"] - } - - return filtered_values - - @server.app.route('/auggie/slack_login', methods=['POST']) - def slack_login(): - # arg = [request.json] - # response = server.transform(metrics.slack_login, args=arg) - # return Response(response=response, status=200, mimetype="application/json") - # From Method - print("slack_login") - - r = requests.get( - url=f'https://slack.com/api/oauth.v2.access?code={body["code"]}&client_id={os.environ["AUGGIE_CLIENT_ID"]}&client_secret={os.environ["AUGGIE_CLIENT_SECRET"]}&redirect_uri=http%3A%2F%2Flocalhost%3A8080') - data = r.json() - - if (data["ok"]): - print(data) - token = data["authed_user"]["access_token"] - team_id = data["team"]["id"] - webclient = slack.WebClient(token=token) - - user_response = webclient.users_identity() - print(user_response) - email = user_response["user"]["email"] - - profile_name = 'augur' - if os.environ.get('AUGUR_IS_PROD'): - profile_name = 'default' - print("Making Boto3 Session") - client = boto3.Session(region_name='us-east-1', - profile_name=profile_name).client('dynamodb') + if ('Item' in response): + user = response['Item'] + print(user) + + filteredUser = { + "interestedRepos": user["interestedRepos"], + "interestedGroups": user["interestedGroups"], + "host": user["host"], + "maxMessages": user["maxMessages"], + "interestedInsights": user["interestedInsightTypes"] + } + + user_body = json.dumps({ + 'team_id': team_id, + 'email': email, + 'user': filteredUser + }) + + print(user_body) + + return user_body + else: + client.put_item( + TableName="auggie-users", + Item={ + 'botToken': {'S': 'null'}, + 'currentMessages': {'N': "0"}, + 'maxMessages': {'N': "0"}, + 'email': {'S': '{}:{}'.format(email, team_id)}, + 
'host': {'S': 'null'}, + 'interestedGroups': {'L': []}, + 'interestedRepos': {'L': []}, + 'interestedInsightTypes': {'L': []}, + 'teamID': {'S': team_id}, + 'thread': {'S': 'null'}, + 'userID': {'S': user_response['user']['id']} + } + ) + + # users_response = webclient.users_list() + # for user in users_response["members"]: + # if "api_app_id" in user["profile"] and user["profile"]["api_app_id"] == "ASQKB8JT0": + # im_response = webclient.conversations_open( + # users=user["id"] + # ) + # print("Hopefully IM is opened") + # channel = im_response["channel"]["id"] + + # message_response = webclient.chat_postMessage( + # channel=channel, + # text="what repos?", + # as_user="true") + # print(message_response) + + # ts = message_response["ts"] + # webclient.chat_delete( + # channel=channel, + # ts=ts + # ) + response = client.get_item( TableName="auggie-users", Key={ @@ -352,92 +419,25 @@ def slack_login(): } ) - if ('Item' in response): - user = response['Item'] - print(user) + user = response['Item'] + print(user) - filteredUser = { - "interestedRepos": user["interestedRepos"], - "interestedGroups": user["interestedGroups"], - "host": user["host"], - "maxMessages": user["maxMessages"], - "interestedInsights": user["interestedInsightTypes"] - } + filteredUser = { + "interestedRepos": user["interestedRepos"], + "interestedGroups": user["interestedGroups"], + "host": user["host"], + "maxMessages": user["maxMessages"], + "interestedInsights": user["interestedInsightTypes"] + } - user_body = json.dumps({ - 'team_id': team_id, - 'email': email, - 'user': filteredUser - }) - - print(user_body) - - return user_body - else: - client.put_item( - TableName="auggie-users", - Item={ - 'botToken': {'S': 'null'}, - 'currentMessages': {'N': "0"}, - 'maxMessages': {'N': "0"}, - 'email': {'S': '{}:{}'.format(email, team_id)}, - 'host': {'S': 'null'}, - 'interestedGroups': {'L': []}, - 'interestedRepos': {'L': []}, - 'interestedInsightTypes': {'L': []}, - 'teamID': {'S': team_id}, - 'thread': {'S': 'null'}, - 'userID': {'S': user_response['user']['id']} - } - ) - - # users_response = webclient.users_list() - # for user in users_response["members"]: - # if "api_app_id" in user["profile"] and user["profile"]["api_app_id"] == "ASQKB8JT0": - # im_response = webclient.conversations_open( - # users=user["id"] - # ) - # print("Hopefully IM is opened") - # channel = im_response["channel"]["id"] - - # message_response = webclient.chat_postMessage( - # channel=channel, - # text="what repos?", - # as_user="true") - # print(message_response) - - # ts = message_response["ts"] - # webclient.chat_delete( - # channel=channel, - # ts=ts - # ) - - response = client.get_item( - TableName="auggie-users", - Key={ - "email": {"S": '{}:{}'.format(email, team_id)} - } - ) - - user = response['Item'] - print(user) - - filteredUser = { - "interestedRepos": user["interestedRepos"], - "interestedGroups": user["interestedGroups"], - "host": user["host"], - "maxMessages": user["maxMessages"], - "interestedInsights": user["interestedInsightTypes"] - } - - user_body = json.dumps({ - 'team_id': team_id, - 'email': email, - 'user': filteredUser - }) + user_body = json.dumps({ + 'team_id': team_id, + 'email': email, + 'user': filteredUser + }) - print(user_body) + print(user_body) - return user_body - else: - return data \ No newline at end of file + return user_body + else: + return data \ No newline at end of file diff --git a/augur/api/routes/batch.py b/augur/api/routes/batch.py index bb08bbc5a1..fbbe1a8545 100644 --- 
a/augur/api/routes/batch.py +++ b/augur/api/routes/batch.py @@ -11,147 +11,147 @@ from flask import request, Response from augur.api.util import metric_metadata import json +from ..server import app from augur.api.routes import AUGUR_API_VERSION logger = logging.getLogger(__name__) -def create_routes(server): - @server.app.route('/{}/batch'.format(AUGUR_API_VERSION), methods=['GET', 'POST']) - def batch(): - """ - Execute multiple requests, submitted as a batch. - :statuscode 207: Multi status - """ +@app.route('/{}/batch'.format(AUGUR_API_VERSION), methods=['GET', 'POST']) +def batch(): + """ + Execute multiple requests, submitted as a batch. + :statuscode 207: Multi status + """ - if request.method == 'GET': - """this will return sensible defaults in the future""" - return app.make_response('{"status": "501", "response": "Defaults for batch requests not implemented. Please POST a JSON array of requests to this endpoint for now."}') + if request.method == 'GET': + """this will return sensible defaults in the future""" + return app.make_response('{"status": "501", "response": "Defaults for batch requests not implemented. Please POST a JSON array of requests to this endpoint for now."}') - try: - requests = json.loads(request.data.decode('utf-8')) - except ValueError as e: - request.abort(400) - - responses = [] - - for index, req in enumerate(requests): - - - method = req['method'] - path = req['path'] - body = req.get('body', None) - - try: - - logger.debug('batch-internal-loop: %s %s' % (method, path)) - - with app.server.app.context(): - with app.test_request_context(path, - method=method, - data=body): - try: - # Can modify flask.g here without affecting - # flask.g of the root request for the batch - - # Pre process Request - rv = app.preprocess_request() - - if rv is None: - # Main Dispatch - rv = app.dispatch_request() - - except Exception as e: - rv = app.handle_user_exception(e) - - response = app.make_response(rv) - - # Post process Request - response = app.process_response(response) - - # Response is a Flask response object. - # _read_response(response) reads response.response - # and returns a string. If your endpoints return JSON object, - # this string would be the response as a JSON string. 
- responses.append({ - "path": path, - "status": response.status_code, - "response": str(response.get_data(), 'utf8'), - }) - - except Exception as e: - - responses.append({ - "path": path, - "status": 500, - "response": str(e) - }) - - - return Response(response=json.dumps(responses), - status=207, - mimetype="application/json") - - - """ - @api {post} /batch Batch Request Metadata - @apiName BatchMetadata - @apiGroup Batch - @apiDescription Returns metadata of batch requests - POST JSON of API requests metadata - """ - @server.app.route('/{}/batch/metadata'.format(AUGUR_API_VERSION), methods=['GET', 'POST']) - def batch_metadata(): - """ - Returns endpoint metadata in batch format - """ - - if request.method == 'GET': - """this will return sensible defaults in the future""" - return app.make_response(json.dumps(metric_metadata)) - - try: - requests = json.loads(request.data.decode('utf-8')) - except ValueError as e: - request.abort(400) - - responses = [] - - for index, req in enumerate(requests): - method = req['method'] - path = req['path'] - body = req.get('body', None) - - try: - logger.info('batch endpoint: ' + path) - with app.server.app.context(): - with app.test_request_context(path, - method=method, - data=body): - try: - rv = app.preprocess_request() - if rv is None: - rv = app.dispatch_request() - except Exception as e: - rv = app.handle_user_exception(e) - response = app.make_response(rv) - response = app.process_response(response) - - responses.append({ - "path": path, - "status": response.status_code, - "response": str(response.get_data(), 'utf8'), - }) - - except Exception as e: - responses.append({ - "path": path, - "status": 500, - "response": str(e) - }) - - return Response(response=json.dumps(responses), - status=207, - mimetype="application/json") + try: + requests = json.loads(request.data.decode('utf-8')) + except ValueError as e: + request.abort(400) + + responses = [] + + for index, req in enumerate(requests): + + + method = req['method'] + path = req['path'] + body = req.get('body', None) + + try: + + logger.debug('batch-internal-loop: %s %s' % (method, path)) + + with app.server.app.context(): + with app.test_request_context(path, + method=method, + data=body): + try: + # Can modify flask.g here without affecting + # flask.g of the root request for the batch + + # Pre process Request + rv = app.preprocess_request() + + if rv is None: + # Main Dispatch + rv = app.dispatch_request() + + except Exception as e: + rv = app.handle_user_exception(e) + + response = app.make_response(rv) + + # Post process Request + response = app.process_response(response) + + # Response is a Flask response object. + # _read_response(response) reads response.response + # and returns a string. If your endpoints return JSON object, + # this string would be the response as a JSON string. 
+ responses.append({ + "path": path, + "status": response.status_code, + "response": str(response.get_data(), 'utf8'), + }) + + except Exception as e: + + responses.append({ + "path": path, + "status": 500, + "response": str(e) + }) + + + return Response(response=json.dumps(responses), + status=207, + mimetype="application/json") + + +""" +@api {post} /batch Batch Request Metadata +@apiName BatchMetadata +@apiGroup Batch +@apiDescription Returns metadata of batch requests +POST JSON of API requests metadata +""" +@app.route('/{}/batch/metadata'.format(AUGUR_API_VERSION), methods=['GET', 'POST']) +def batch_metadata(): + """ + Returns endpoint metadata in batch format + """ + + if request.method == 'GET': + """this will return sensible defaults in the future""" + return app.make_response(json.dumps(metric_metadata)) + + try: + requests = json.loads(request.data.decode('utf-8')) + except ValueError as e: + request.abort(400) + + responses = [] + + for index, req in enumerate(requests): + method = req['method'] + path = req['path'] + body = req.get('body', None) + + try: + logger.info('batch endpoint: ' + path) + with app.server.app.context(): + with app.test_request_context(path, + method=method, + data=body): + try: + rv = app.preprocess_request() + if rv is None: + rv = app.dispatch_request() + except Exception as e: + rv = app.handle_user_exception(e) + response = app.make_response(rv) + response = app.process_response(response) + + responses.append({ + "path": path, + "status": response.status_code, + "response": str(response.get_data(), 'utf8'), + }) + + except Exception as e: + responses.append({ + "path": path, + "status": 500, + "response": str(e) + }) + + return Response(response=json.dumps(responses), + status=207, + mimetype="application/json") diff --git a/augur/api/routes/collection_status.py b/augur/api/routes/collection_status.py index 49c62e2d76..44fd9407c2 100644 --- a/augur/api/routes/collection_status.py +++ b/augur/api/routes/collection_status.py @@ -5,170 +5,169 @@ from flask import Response from augur.api.routes import AUGUR_API_VERSION +from ..server import app, engine -def create_routes(server): +@app.route('/{}/collection_status/commits'.format(AUGUR_API_VERSION)) +def commit_collection_status(): # TODO: make this name automatic - wrapper? + commit_collection_sql = s.sql.text(""" + SELECT + repo_id, + repo_path, + repo_name, + repo_git, + repo_status + FROM + repo + WHERE + repo_status != 'Complete' + UNION + SELECT + repo_id, + repo_path, + repo_name, + repo_git, + repo_status + FROM + repo + WHERE + repo_status = 'Complete' + """) + results = pd.read_sql(commit_collection_sql, engine) + data = results.to_json( + orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") - @server.app.route('/{}/collection_status/commits'.format(AUGUR_API_VERSION)) - def commit_collection_status(): # TODO: make this name automatic - wrapper? 
- commit_collection_sql = s.sql.text(""" - SELECT - repo_id, - repo_path, - repo_name, - repo_git, - repo_status - FROM - repo - WHERE - repo_status != 'Complete' - UNION - SELECT - repo_id, - repo_path, - repo_name, - repo_git, - repo_status - FROM - repo - WHERE - repo_status = 'Complete' - """) - results = pd.read_sql(commit_collection_sql, server.engine) - data = results.to_json( - orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/collection_status/issues'.format(AUGUR_API_VERSION)) - def issue_collection_status(): # TODO: make this name automatic - wrapper? - issue_collection_sql = s.sql.text(""" - SELECT - * - FROM - ( - ( SELECT repo_id, issues_enabled, COUNT ( * ) AS meta_count - FROM repo_info - WHERE issues_count != 0 - GROUP BY repo_id, issues_enabled - ORDER BY repo_id ) zz - LEFT OUTER JOIN ( - SELECT --A.repo_id, - A.repo_name, - b.issues_count, - d.repo_id AS issue_repo_id, - e.last_collected, - f.most_recently_collected_issue, - COUNT ( * ) AS issue_count, - ( - b.issues_count - COUNT ( * )) AS issues_missing, - ABS ( - CAST (( COUNT ( * )) +1 AS DOUBLE PRECISION ) / CAST ( b.issues_count + 1 AS DOUBLE PRECISION )) AS ratio_abs, - ( - CAST (( COUNT ( * )) +1 AS DOUBLE PRECISION ) / CAST ( b.issues_count + 1 AS DOUBLE PRECISION )) AS ratio_issues - FROM - augur_data.repo A, - augur_data.issues d, - augur_data.repo_info b, - ( SELECT repo_id, MAX ( data_collection_date ) AS last_collected FROM augur_data.repo_info GROUP BY repo_id ORDER BY repo_id ) e, - ( SELECT repo_id, MAX ( data_collection_date ) AS most_recently_collected_issue FROM issues GROUP BY repo_id ORDER BY repo_id ) f - WHERE - A.repo_id = b.repo_id - AND lower(A.repo_git) like '%github.com%' - AND A.repo_id = d.repo_id - AND b.repo_id = d.repo_id - AND e.repo_id = A.repo_id - AND b.data_collection_date = e.last_collected - -- AND d.issue_id IS NULL - AND f.repo_id = A.repo_id - and d.pull_request is NULL - and b.issues_count is not NULL - GROUP BY - A.repo_id, - d.repo_id, - b.issues_count, - e.last_collected, - f.most_recently_collected_issue - ORDER BY ratio_abs - ) yy ON zz.repo_id = issue_repo_id - ) D - WHERE d.issues_enabled = 'true'; - """) - results = pd.read_sql(issue_collection_sql, server.engine) - data = results.to_json( - orient="records", date_format='iso', date_unit='ms') - parsed_data = json.loads(data) - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/collection_status/pull_requests'.format(AUGUR_API_VERSION)) - def pull_request_collection_status(): # TODO: make this name automatic - wrapper? - pull_request_collection_sql = s.sql.text(""" - SELECT - * - FROM - ( - SELECT - repo_info.repo_id, - repo.repo_name, - MAX ( pull_request_count ) AS max_pr_count, - COUNT ( * ) AS meta_count - FROM - repo_info, - repo -- WHERE issues_enabled = 'true' - WHERE - pull_request_count >= 1 - AND repo.repo_id = repo_info.repo_id - GROUP BY - repo_info.repo_id, - repo.repo_name - ORDER BY - repo_info.repo_id, - repo.repo_name - ) yy +@app.route('/{}/collection_status/issues'.format(AUGUR_API_VERSION)) +def issue_collection_status(): # TODO: make this name automatic - wrapper? 
+ issue_collection_sql = s.sql.text(""" + SELECT + * + FROM + ( + ( SELECT repo_id, issues_enabled, COUNT ( * ) AS meta_count + FROM repo_info + WHERE issues_count != 0 + GROUP BY repo_id, issues_enabled + ORDER BY repo_id ) zz LEFT OUTER JOIN ( - SELECT -- A.repo_id, - --A.repo_name, - b.pull_request_count, - d.repo_id AS pull_request_repo_id, + SELECT --A.repo_id, + A.repo_name, + b.issues_count, + d.repo_id AS issue_repo_id, e.last_collected, - f.last_pr_collected, - COUNT ( * ) AS pull_requests_collected, - ( b.pull_request_count - COUNT ( * ) ) AS pull_requests_missing, - ABS ( CAST ( ( COUNT ( * ) ) + 1 AS DOUBLE PRECISION ) / CAST ( b.pull_request_count + 1 AS DOUBLE PRECISION ) ) AS ratio_abs, - ( CAST ( ( COUNT ( * ) ) + 1 AS DOUBLE PRECISION ) / CAST ( b.pull_request_count + 1 AS DOUBLE PRECISION ) ) AS ratio_issues + f.most_recently_collected_issue, + COUNT ( * ) AS issue_count, + ( + b.issues_count - COUNT ( * )) AS issues_missing, + ABS ( + CAST (( COUNT ( * )) +1 AS DOUBLE PRECISION ) / CAST ( b.issues_count + 1 AS DOUBLE PRECISION )) AS ratio_abs, + ( + CAST (( COUNT ( * )) +1 AS DOUBLE PRECISION ) / CAST ( b.issues_count + 1 AS DOUBLE PRECISION )) AS ratio_issues FROM augur_data.repo A, - augur_data.pull_requests d, + augur_data.issues d, augur_data.repo_info b, ( SELECT repo_id, MAX ( data_collection_date ) AS last_collected FROM augur_data.repo_info GROUP BY repo_id ORDER BY repo_id ) e, - ( SELECT repo_id, MAX ( data_collection_date ) AS last_pr_collected FROM augur_data.pull_requests GROUP BY repo_id ORDER BY repo_id ) f + ( SELECT repo_id, MAX ( data_collection_date ) AS most_recently_collected_issue FROM issues GROUP BY repo_id ORDER BY repo_id ) f WHERE A.repo_id = b.repo_id - AND LOWER ( A.repo_git ) LIKE'%github.com%' + AND lower(A.repo_git) like '%github.com%' AND A.repo_id = d.repo_id AND b.repo_id = d.repo_id AND e.repo_id = A.repo_id AND b.data_collection_date = e.last_collected - AND f.repo_id = A.repo_id -- AND d.pull_request_id IS NULL + -- AND d.issue_id IS NULL + AND f.repo_id = A.repo_id + and d.pull_request is NULL + and b.issues_count is not NULL GROUP BY A.repo_id, d.repo_id, - b.pull_request_count, + b.issues_count, e.last_collected, - f.last_pr_collected - ORDER BY - ratio_abs desc - ) zz ON yy.repo_id = pull_request_repo_id + f.most_recently_collected_issue + ORDER BY ratio_abs + ) yy ON zz.repo_id = issue_repo_id + ) D + WHERE d.issues_enabled = 'true'; + """) + results = pd.read_sql(issue_collection_sql, engine) + data = results.to_json( + orient="records", date_format='iso', date_unit='ms') + parsed_data = json.loads(data) + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/collection_status/pull_requests'.format(AUGUR_API_VERSION)) +def pull_request_collection_status(): # TODO: make this name automatic - wrapper? 
+ pull_request_collection_sql = s.sql.text(""" + SELECT + * + FROM + ( + SELECT + repo_info.repo_id, + repo.repo_name, + MAX ( pull_request_count ) AS max_pr_count, + COUNT ( * ) AS meta_count + FROM + repo_info, + repo -- WHERE issues_enabled = 'true' + WHERE + pull_request_count >= 1 + AND repo.repo_id = repo_info.repo_id + GROUP BY + repo_info.repo_id, + repo.repo_name + ORDER BY + repo_info.repo_id, + repo.repo_name + ) yy + LEFT OUTER JOIN ( + SELECT -- A.repo_id, + --A.repo_name, + b.pull_request_count, + d.repo_id AS pull_request_repo_id, + e.last_collected, + f.last_pr_collected, + COUNT ( * ) AS pull_requests_collected, + ( b.pull_request_count - COUNT ( * ) ) AS pull_requests_missing, + ABS ( CAST ( ( COUNT ( * ) ) + 1 AS DOUBLE PRECISION ) / CAST ( b.pull_request_count + 1 AS DOUBLE PRECISION ) ) AS ratio_abs, + ( CAST ( ( COUNT ( * ) ) + 1 AS DOUBLE PRECISION ) / CAST ( b.pull_request_count + 1 AS DOUBLE PRECISION ) ) AS ratio_issues + FROM + augur_data.repo A, + augur_data.pull_requests d, + augur_data.repo_info b, + ( SELECT repo_id, MAX ( data_collection_date ) AS last_collected FROM augur_data.repo_info GROUP BY repo_id ORDER BY repo_id ) e, + ( SELECT repo_id, MAX ( data_collection_date ) AS last_pr_collected FROM augur_data.pull_requests GROUP BY repo_id ORDER BY repo_id ) f + WHERE + A.repo_id = b.repo_id + AND LOWER ( A.repo_git ) LIKE'%github.com%' + AND A.repo_id = d.repo_id + AND b.repo_id = d.repo_id + AND e.repo_id = A.repo_id + AND b.data_collection_date = e.last_collected + AND f.repo_id = A.repo_id -- AND d.pull_request_id IS NULL + GROUP BY + A.repo_id, + d.repo_id, + b.pull_request_count, + e.last_collected, + f.last_pr_collected ORDER BY - ratio_abs; - """) - results = pd.read_sql(pull_request_collection_sql, server.engine) - data = results.to_json( - orient="records", date_format='iso', date_unit='ms') - parsed_data = json.loads(data) - return Response(response=data, - status=200, - mimetype="application/json") + ratio_abs desc + ) zz ON yy.repo_id = pull_request_repo_id + ORDER BY + ratio_abs; + """) + results = pd.read_sql(pull_request_collection_sql, engine) + data = results.to_json( + orient="records", date_format='iso', date_unit='ms') + parsed_data = json.loads(data) + return Response(response=data, + status=200, + mimetype="application/json") diff --git a/augur/api/routes/config.py b/augur/api/routes/config.py index 968d0de417..c0a108cf9a 100644 --- a/augur/api/routes/config.py +++ b/augur/api/routes/config.py @@ -14,6 +14,7 @@ from augur.application.db.models import Config from augur.application.config import AugurConfig from augur.application.db.session import DatabaseSession +from ..server import app logger = logging.getLogger(__name__) development = get_development_flag() @@ -28,43 +29,42 @@ def generate_upgrade_request(): return response, 426 -def create_routes(server): - @server.app.route(f"/{AUGUR_API_VERSION}/config/get", methods=['GET', 'POST']) - def get_config(): - if not development and not request.is_secure: - return generate_upgrade_request() +@app.route(f"/{AUGUR_API_VERSION}/config/get", methods=['GET', 'POST']) +def get_config(): + if not development and not request.is_secure: + return generate_upgrade_request() - with DatabaseSession(logger) as session: - - config_dict = AugurConfig(logger, session).config.load_config() + with DatabaseSession(logger) as session: + + config_dict = AugurConfig(logger, session).config.load_config() - return jsonify(config_dict), 200 + return jsonify(config_dict), 200 - 
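The hunks above and the contributor_reports.py changes that follow all apply the same refactor: route handlers are lifted out of a `create_routes(server)` factory and registered directly on the module-level Flask `app` imported from `..server`, and database reads go through the imported `engine` instead of `server.engine`. The condensed sketch below is illustrative only, not part of the patch: the endpoint path, the query, and the `example` handler name are placeholders, and the relative import assumes the module lives under `augur/api/routes/` as the patched files do.

```
# Sketch of the registration pattern before and after this patch (placeholder names).
import pandas as pd
import sqlalchemy as s
from flask import Response

# Before: each routes module exposed a factory that closed over the server object,
# so nothing was registered until the server called create_routes(self).
def create_routes(server):

    @server.app.route('/api/unstable/example')
    def example():
        example_sql = s.sql.text("SELECT repo_id, repo_name FROM repo")  # placeholder query
        df = pd.read_sql(example_sql, server.engine)   # engine reached through the server object
        return Response(response=df.to_json(orient="records"),
                        status=200, mimetype="application/json")

# After: the module imports the shared app and engine and registers its routes at import time.
from ..server import app, engine

@app.route('/api/unstable/example')
def example():
    example_sql = s.sql.text("SELECT repo_id, repo_name FROM repo")      # placeholder query
    df = pd.read_sql(example_sql, engine)              # module-level engine, no server wrapper
    return Response(response=df.to_json(orient="records"),
                    status=200, mimetype="application/json")
```

The practical effect, presumably, is that importing a routes module is enough to register its endpoints, so the per-module `create_routes()` calls used previously are no longer needed.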
@server.app.route(f"/{AUGUR_API_VERSION}/config/update", methods=['POST']) - def update_config(): - if not development and not request.is_secure: - return generate_upgrade_request() +@app.route(f"/{AUGUR_API_VERSION}/config/update", methods=['POST']) +def update_config(): + if not development and not request.is_secure: + return generate_upgrade_request() - update_dict = request.get_json() + update_dict = request.get_json() - with DatabaseSession(logger) as session: + with DatabaseSession(logger) as session: - for section, data in update_dict.items(): + for section, data in update_dict.items(): - for key, value in data.items(): + for key, value in data.items(): - try: - config_setting = session.query(Config).filter(Config.section_name == section, Config.setting_name == key).one() - except s.orm.exc.NoResultFound: - return jsonify({"status": "Bad Request", "section": section, "setting": key}), 400 + try: + config_setting = session.query(Config).filter(Config.section_name == section, Config.setting_name == key).one() + except s.orm.exc.NoResultFound: + return jsonify({"status": "Bad Request", "section": section, "setting": key}), 400 - config_setting.value = value + config_setting.value = value - session.add(config_setting) + session.add(config_setting) - session.commit() + session.commit() - return jsonify({"status": "success"}), 200 + return jsonify({"status": "success"}), 200 diff --git a/augur/api/routes/contributor_reports.py b/augur/api/routes/contributor_reports.py index 0d599e6acf..1d09d4ac56 100644 --- a/augur/api/routes/contributor_reports.py +++ b/augur/api/routes/contributor_reports.py @@ -19,1251 +19,1251 @@ from bokeh.transform import cumsum from augur.api.routes import AUGUR_API_VERSION +from ..server import app, engine warnings.filterwarnings('ignore') -def create_routes(server): - def quarters(month, year): - if 1 <= month <= 3: - return '01' + '/' + year - elif 4 <= month <= 6: - return '04' + '/' + year - elif 5 <= month <= 9: - return '07' + '/' + year - elif 10 <= month <= 12: - return '10' + '/' + year - - def new_contributor_data_collection(repo_id, required_contributions): - - rank_list = [] - for num in range(1, required_contributions + 1): - rank_list.append(num) - rank_tuple = tuple(rank_list) - - contributor_query = salc.sql.text(f""" +def quarters(month, year): + if 1 <= month <= 3: + return '01' + '/' + year + elif 4 <= month <= 6: + return '04' + '/' + year + elif 5 <= month <= 9: + return '07' + '/' + year + elif 10 <= month <= 12: + return '10' + '/' + year + +def new_contributor_data_collection(repo_id, required_contributions): + + rank_list = [] + for num in range(1, required_contributions + 1): + rank_list.append(num) + rank_tuple = tuple(rank_list) + + contributor_query = salc.sql.text(f""" + + SELECT * FROM ( + SELECT ID AS + cntrb_id, + A.created_at AS created_at, + date_part('month', A.created_at::DATE) AS month, + date_part('year', A.created_at::DATE) AS year, + A.repo_id, + repo_name, + full_name, + login, + ACTION, + rank() OVER ( + PARTITION BY id + ORDER BY A.created_at ASC + ) + FROM + ( + ( + SELECT + canonical_id AS ID, + created_at AS created_at, + repo_id, + 'issue_opened' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM + augur_data.issues + LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = issues.reporter_id + LEFT OUTER JOIN ( + SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, + cntrb_canonical AS canonical_email, + data_collection_date, + cntrb_id AS canonical_id + 
FROM augur_data.contributors + WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + repo_id = {repo_id} + AND pull_request IS NULL + GROUP BY + canonical_id, + repo_id, + issues.created_at, + contributors.cntrb_full_name, + contributors.cntrb_login + ) UNION ALL + ( + SELECT + canonical_id AS ID, + TO_TIMESTAMP( cmt_author_date, 'YYYY-MM-DD' ) AS created_at, + repo_id, + 'commit' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM + augur_data.commits + LEFT OUTER JOIN augur_data.contributors ON cntrb_email = cmt_author_email + LEFT OUTER JOIN ( + SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, + cntrb_canonical AS canonical_email, + data_collection_date, cntrb_id AS canonical_id + FROM augur_data.contributors + WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + repo_id = {repo_id} + GROUP BY + repo_id, + canonical_email, + canonical_id, + commits.cmt_author_date, + contributors.cntrb_full_name, + contributors.cntrb_login + ) UNION ALL + ( + SELECT + message.cntrb_id AS ID, + created_at AS created_at, + commits.repo_id, + 'commit_comment' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + + FROM + augur_data.commit_comment_ref, + augur_data.commits, + augur_data.message + LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = message.cntrb_id + LEFT OUTER JOIN ( + SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, + cntrb_canonical AS canonical_email, + data_collection_date, cntrb_id AS canonical_id + FROM augur_data.contributors + WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + commits.cmt_id = commit_comment_ref.cmt_id + AND commits.repo_id = {repo_id} + AND commit_comment_ref.msg_id = message.msg_id - SELECT * FROM ( - SELECT ID AS - cntrb_id, - A.created_at AS created_at, - date_part('month', A.created_at::DATE) AS month, - date_part('year', A.created_at::DATE) AS year, - A.repo_id, - repo_name, - full_name, - login, - ACTION, - rank() OVER ( - PARTITION BY id - ORDER BY A.created_at ASC - ) - FROM + GROUP BY + ID, + commits.repo_id, + commit_comment_ref.created_at, + contributors.cntrb_full_name, + contributors.cntrb_login + ) UNION ALL + ( + SELECT + issue_events.cntrb_id AS ID, + issue_events.created_at AS created_at, + issues.repo_id, + 'issue_closed' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM + augur_data.issues, + augur_data.issue_events + LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = issue_events.cntrb_id + LEFT OUTER JOIN ( + SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, + cntrb_canonical AS canonical_email, + data_collection_date, + cntrb_id AS canonical_id + FROM augur_data.contributors + WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + issues.repo_id = {repo_id} + AND issues.issue_id = issue_events.issue_id + AND issues.pull_request IS NULL + AND issue_events.cntrb_id IS NOT NULL + AND ACTION = 'closed' + GROUP BY + issue_events.cntrb_id, + issues.repo_id, + issue_events.created_at, + contributors.cntrb_full_name, + 
contributors.cntrb_login + ) UNION ALL ( - ( - SELECT - canonical_id AS ID, - created_at AS created_at, - repo_id, - 'issue_opened' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - FROM - augur_data.issues - LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = issues.reporter_id - LEFT OUTER JOIN ( - SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, - cntrb_canonical AS canonical_email, - data_collection_date, - cntrb_id AS canonical_id - FROM augur_data.contributors - WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - repo_id = {repo_id} - AND pull_request IS NULL - GROUP BY - canonical_id, - repo_id, - issues.created_at, - contributors.cntrb_full_name, - contributors.cntrb_login - ) UNION ALL - ( - SELECT - canonical_id AS ID, - TO_TIMESTAMP( cmt_author_date, 'YYYY-MM-DD' ) AS created_at, - repo_id, - 'commit' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - FROM - augur_data.commits - LEFT OUTER JOIN augur_data.contributors ON cntrb_email = cmt_author_email - LEFT OUTER JOIN ( - SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, - cntrb_canonical AS canonical_email, - data_collection_date, cntrb_id AS canonical_id - FROM augur_data.contributors - WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - repo_id = {repo_id} - GROUP BY - repo_id, - canonical_email, - canonical_id, - commits.cmt_author_date, - contributors.cntrb_full_name, - contributors.cntrb_login - ) UNION ALL - ( - SELECT - message.cntrb_id AS ID, - created_at AS created_at, - commits.repo_id, - 'commit_comment' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - - FROM - augur_data.commit_comment_ref, - augur_data.commits, - augur_data.message - LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = message.cntrb_id - LEFT OUTER JOIN ( - SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, - cntrb_canonical AS canonical_email, - data_collection_date, cntrb_id AS canonical_id - FROM augur_data.contributors - WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - commits.cmt_id = commit_comment_ref.cmt_id - AND commits.repo_id = {repo_id} - AND commit_comment_ref.msg_id = message.msg_id - - GROUP BY - ID, - commits.repo_id, - commit_comment_ref.created_at, - contributors.cntrb_full_name, - contributors.cntrb_login - ) UNION ALL - ( - SELECT - issue_events.cntrb_id AS ID, - issue_events.created_at AS created_at, - issues.repo_id, - 'issue_closed' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - FROM - augur_data.issues, - augur_data.issue_events - LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = issue_events.cntrb_id - LEFT OUTER JOIN ( + SELECT + pr_augur_contributor_id AS ID, + pr_created_at AS created_at, + pull_requests.repo_id, + 'open_pull_request' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM + augur_data.pull_requests + LEFT OUTER JOIN augur_data.contributors ON pull_requests.pr_augur_contributor_id = contributors.cntrb_id + LEFT OUTER JOIN ( SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, 
cntrb_canonical AS canonical_email, data_collection_date, cntrb_id AS canonical_id FROM augur_data.contributors WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - issues.repo_id = {repo_id} - AND issues.issue_id = issue_events.issue_id - AND issues.pull_request IS NULL - AND issue_events.cntrb_id IS NOT NULL - AND ACTION = 'closed' - GROUP BY - issue_events.cntrb_id, - issues.repo_id, - issue_events.created_at, - contributors.cntrb_full_name, - contributors.cntrb_login - ) UNION ALL - ( - SELECT - pr_augur_contributor_id AS ID, - pr_created_at AS created_at, - pull_requests.repo_id, - 'open_pull_request' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - FROM - augur_data.pull_requests - LEFT OUTER JOIN augur_data.contributors ON pull_requests.pr_augur_contributor_id = contributors.cntrb_id - LEFT OUTER JOIN ( - SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, - cntrb_canonical AS canonical_email, - data_collection_date, - cntrb_id AS canonical_id - FROM augur_data.contributors - WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - pull_requests.repo_id = {repo_id} - GROUP BY - pull_requests.pr_augur_contributor_id, - pull_requests.repo_id, - pull_requests.pr_created_at, - contributors.cntrb_full_name, - contributors.cntrb_login - ) UNION ALL - ( - SELECT - message.cntrb_id AS ID, - msg_timestamp AS created_at, - pull_requests.repo_id as repo_id, - 'pull_request_comment' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - FROM - augur_data.pull_requests, - augur_data.pull_request_message_ref, - augur_data.message - LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = message.cntrb_id - LEFT OUTER JOIN ( - SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, - cntrb_canonical AS canonical_email, - data_collection_date, - cntrb_id AS canonical_id - FROM augur_data.contributors - WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - pull_requests.repo_id = {repo_id} - AND pull_request_message_ref.pull_request_id = pull_requests.pull_request_id - AND pull_request_message_ref.msg_id = message.msg_id - GROUP BY - message.cntrb_id, - pull_requests.repo_id, - message.msg_timestamp, - contributors.cntrb_full_name, - contributors.cntrb_login - ) UNION ALL - ( - SELECT - issues.reporter_id AS ID, - msg_timestamp AS created_at, - issues.repo_id as repo_id, - 'issue_comment' AS ACTION, - contributors.cntrb_full_name AS full_name, - contributors.cntrb_login AS login - FROM - issues, - issue_message_ref, - message - LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = message.cntrb_id - LEFT OUTER JOIN ( - SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, - cntrb_canonical AS canonical_email, - data_collection_date, - cntrb_id AS canonical_id - FROM augur_data.contributors - WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical - ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical - WHERE - issues.repo_id = {repo_id} - AND issue_message_ref.msg_id = message.msg_id - AND issues.issue_id = issue_message_ref.issue_id - AND issues.pull_request_id = NULL - GROUP BY - issues.reporter_id, - 
issues.repo_id, - message.msg_timestamp, - contributors.cntrb_full_name, - contributors.cntrb_login - ) - ) A, - repo - WHERE - ID IS NOT NULL - AND A.repo_id = repo.repo_id - GROUP BY - A.ID, - A.repo_id, - A.ACTION, - A.created_at, - repo.repo_name, - A.full_name, - A.login - ORDER BY - cntrb_id - ) b - WHERE RANK IN {rank_tuple} - - """) - df = pd.read_sql(contributor_query, server.engine) - - df = df.loc[~df['full_name'].str.contains('bot', na=False)] - df = df.loc[~df['login'].str.contains('bot', na=False)] - - df = df.loc[~df['cntrb_id'].isin(df[df.duplicated(['cntrb_id', 'created_at', 'repo_id', 'rank'])]['cntrb_id'])] - - # add yearmonths to contributor - df[['month', 'year']] = df[['month', 'year']].astype(int).astype(str) - df['yearmonth'] = df['month'] + '/' + df['year'] - df['yearmonth'] = pd.to_datetime(df['yearmonth']) - - # add column with every value being one, so when the contributor df is concatenated - # with the months df, the filler months won't be counted in the sums - df['new_contributors'] = 1 - - # add quarters to contributor dataframe - df['month'] = df['month'].astype(int) - df['quarter'] = df.apply(lambda x: quarters(x['month'], x['year']), axis=1, result_type='reduce') - df['quarter'] = pd.to_datetime(df['quarter']) + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + pull_requests.repo_id = {repo_id} + GROUP BY + pull_requests.pr_augur_contributor_id, + pull_requests.repo_id, + pull_requests.pr_created_at, + contributors.cntrb_full_name, + contributors.cntrb_login + ) UNION ALL + ( + SELECT + message.cntrb_id AS ID, + msg_timestamp AS created_at, + pull_requests.repo_id as repo_id, + 'pull_request_comment' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM + augur_data.pull_requests, + augur_data.pull_request_message_ref, + augur_data.message + LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = message.cntrb_id + LEFT OUTER JOIN ( + SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, + cntrb_canonical AS canonical_email, + data_collection_date, + cntrb_id AS canonical_id + FROM augur_data.contributors + WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + pull_requests.repo_id = {repo_id} + AND pull_request_message_ref.pull_request_id = pull_requests.pull_request_id + AND pull_request_message_ref.msg_id = message.msg_id + GROUP BY + message.cntrb_id, + pull_requests.repo_id, + message.msg_timestamp, + contributors.cntrb_full_name, + contributors.cntrb_login + ) UNION ALL + ( + SELECT + issues.reporter_id AS ID, + msg_timestamp AS created_at, + issues.repo_id as repo_id, + 'issue_comment' AS ACTION, + contributors.cntrb_full_name AS full_name, + contributors.cntrb_login AS login + FROM + issues, + issue_message_ref, + message + LEFT OUTER JOIN augur_data.contributors ON contributors.cntrb_id = message.cntrb_id + LEFT OUTER JOIN ( + SELECT DISTINCT ON ( cntrb_canonical ) cntrb_full_name, + cntrb_canonical AS canonical_email, + data_collection_date, + cntrb_id AS canonical_id + FROM augur_data.contributors + WHERE cntrb_canonical = cntrb_email ORDER BY cntrb_canonical + ) canonical_full_names ON canonical_full_names.canonical_email =contributors.cntrb_canonical + WHERE + issues.repo_id = {repo_id} + AND issue_message_ref.msg_id = message.msg_id + AND issues.issue_id = issue_message_ref.issue_id + AND issues.pull_request_id = NULL + 
GROUP BY + issues.reporter_id, + issues.repo_id, + message.msg_timestamp, + contributors.cntrb_full_name, + contributors.cntrb_login + ) + ) A, + repo + WHERE + ID IS NOT NULL + AND A.repo_id = repo.repo_id + GROUP BY + A.ID, + A.repo_id, + A.ACTION, + A.created_at, + repo.repo_name, + A.full_name, + A.login + ORDER BY + cntrb_id + ) b + WHERE RANK IN {rank_tuple} - return df + """) + df = pd.read_sql(contributor_query, engine) - def months_data_collection(start_date, end_date): + df = df.loc[~df['full_name'].str.contains('bot', na=False)] + df = df.loc[~df['login'].str.contains('bot', na=False)] - # months_query makes a df of years and months, this is used to fill - # the months with no data in the visualizations - months_query = salc.sql.text(f""" - SELECT * - FROM - ( - SELECT - date_part( 'year', created_month :: DATE ) AS year, - date_part( 'month', created_month :: DATE ) AS MONTH - FROM - (SELECT * - FROM ( - SELECT created_month :: DATE - FROM generate_series (TIMESTAMP '{start_date}', TIMESTAMP '{end_date}', INTERVAL '1 month' ) created_month ) d ) x - ) y - """) - months_df = pd.read_sql(months_query, server.engine) + df = df.loc[~df['cntrb_id'].isin(df[df.duplicated(['cntrb_id', 'created_at', 'repo_id', 'rank'])]['cntrb_id'])] + + # add yearmonths to contributor + df[['month', 'year']] = df[['month', 'year']].astype(int).astype(str) + df['yearmonth'] = df['month'] + '/' + df['year'] + df['yearmonth'] = pd.to_datetime(df['yearmonth']) - # add yearmonths to months_df - months_df[['year', 'month']] = months_df[['year', 'month']].astype(float).astype(int).astype(str) - months_df['yearmonth'] = months_df['month'] + '/' + months_df['year'] - months_df['yearmonth'] = pd.to_datetime(months_df['yearmonth']) + # add column with every value being one, so when the contributor df is concatenated + # with the months df, the filler months won't be counted in the sums + df['new_contributors'] = 1 - # filter months_df with start_date and end_date, the contributor df is filtered in the visualizations - months_df = months_df.set_index(months_df['yearmonth']) - months_df = months_df.loc[start_date: end_date].reset_index(drop=True) + # add quarters to contributor dataframe + df['month'] = df['month'].astype(int) + df['quarter'] = df.apply(lambda x: quarters(x['month'], x['year']), axis=1, result_type='reduce') + df['quarter'] = pd.to_datetime(df['quarter']) - # add quarters to months dataframe - months_df['month'] = months_df['month'].astype(int) - months_df['quarter'] = months_df.apply(lambda x: quarters(x['month'], x['year']), axis=1) - months_df['quarter'] = pd.to_datetime(months_df['quarter']) + return df - return months_df +def months_data_collection(start_date, end_date): - def get_repo_id_start_date_and_end_date(): + # months_query makes a df of years and months, this is used to fill + # the months with no data in the visualizations + months_query = salc.sql.text(f""" + SELECT * + FROM + ( + SELECT + date_part( 'year', created_month :: DATE ) AS year, + date_part( 'month', created_month :: DATE ) AS MONTH + FROM + (SELECT * + FROM ( + SELECT created_month :: DATE + FROM generate_series (TIMESTAMP '{start_date}', TIMESTAMP '{end_date}', INTERVAL '1 month' ) created_month ) d ) x + ) y + """) + months_df = pd.read_sql(months_query, engine) - now = datetime.datetime.now() + # add yearmonths to months_df + months_df[['year', 'month']] = months_df[['year', 'month']].astype(float).astype(int).astype(str) + months_df['yearmonth'] = months_df['month'] + '/' + months_df['year'] + 
months_df['yearmonth'] = pd.to_datetime(months_df['yearmonth']) - repo_id = request.args.get('repo_id') - start_date = str(request.args.get('start_date', "{}-01-01".format(now.year - 1))) - end_date = str(request.args.get('end_date', "{}-{}-{}".format(now.year, now.month, now.day))) + # filter months_df with start_date and end_date, the contributor df is filtered in the visualizations + months_df = months_df.set_index(months_df['yearmonth']) + months_df = months_df.loc[start_date: end_date].reset_index(drop=True) - if repo_id: + # add quarters to months dataframe + months_df['month'] = months_df['month'].astype(int) + months_df['quarter'] = months_df.apply(lambda x: quarters(x['month'], x['year']), axis=1) + months_df['quarter'] = pd.to_datetime(months_df['quarter']) - if start_date < end_date: - return int(repo_id), start_date, end_date, None - else: + return months_df + +def get_repo_id_start_date_and_end_date(): - error = { - "message": "Invalid end_date. end_date is before the start_date", - "status_code": 400 - } + now = datetime.datetime.now() - return int(repo_id), None, None, error + repo_id = request.args.get('repo_id') + start_date = str(request.args.get('start_date', "{}-01-01".format(now.year - 1))) + end_date = str(request.args.get('end_date', "{}-{}-{}".format(now.year, now.month, now.day))) + if repo_id: + + if start_date < end_date: + return int(repo_id), start_date, end_date, None else: + error = { - "message": "repo_id not specified. Use this endpoint to get a list of available repos: http:///api/unstable/repos", + "message": "Invalid end_date. end_date is before the start_date", "status_code": 400 } - return None, None, None, error - def filter_out_repeats_without_required_contributions_in_required_time(repeat_list, repeats_df, required_time, - first_list): + return int(repo_id), None, None, error - differences = [] - for i in range(0, len(repeat_list)): - time_difference = repeat_list[i] - first_list[i] - total = time_difference.days * 86400 + time_difference.seconds - differences.append(total) - repeats_df['differences'] = differences + else: + error = { + "message": "repo_id not specified. 
Use this endpoint to get a list of available repos: http:///api/unstable/repos", + "status_code": 400 + } + return None, None, None, error - # remove contributions who made enough contributions, but not in a short enough time - repeats_df = repeats_df.loc[repeats_df['differences'] <= required_time * 86400] +def filter_out_repeats_without_required_contributions_in_required_time(repeat_list, repeats_df, required_time, + first_list): - return repeats_df + differences = [] + for i in range(0, len(repeat_list)): + time_difference = repeat_list[i] - first_list[i] + total = time_difference.days * 86400 + time_difference.seconds + differences.append(total) + repeats_df['differences'] = differences - def compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, required_time, start_date): + # remove contributions who made enough contributions, but not in a short enough time + repeats_df = repeats_df.loc[repeats_df['differences'] <= required_time * 86400] - # create a copy of contributor dataframe - driver_df = input_df.copy() + return repeats_df - # remove first time contributors before begin date, along with their second contribution - mask = (driver_df['yearmonth'] < start_date) - driver_df = driver_df[~driver_df['cntrb_id'].isin(driver_df.loc[mask]['cntrb_id'])] +def compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, required_time, start_date): - # determine if contributor is a drive by by finding all the cntrb_id's that do not have a second contribution - repeats_df = driver_df.copy() + # create a copy of contributor dataframe + driver_df = input_df.copy() - repeats_df = repeats_df.loc[repeats_df['rank'].isin([1, required_contributions])] + # remove first time contributors before begin date, along with their second contribution + mask = (driver_df['yearmonth'] < start_date) + driver_df = driver_df[~driver_df['cntrb_id'].isin(driver_df.loc[mask]['cntrb_id'])] - # removes all the contributors that only have a first contirbution - repeats_df = repeats_df[ - repeats_df['cntrb_id'].isin(repeats_df.loc[driver_df['rank'] == required_contributions]['cntrb_id'])] + # determine if contributor is a drive by by finding all the cntrb_id's that do not have a second contribution + repeats_df = driver_df.copy() - repeat_list = repeats_df.loc[driver_df['rank'] == required_contributions]['created_at'].tolist() - first_list = repeats_df.loc[driver_df['rank'] == 1]['created_at'].tolist() + repeats_df = repeats_df.loc[repeats_df['rank'].isin([1, required_contributions])] - repeats_df = repeats_df.loc[driver_df['rank'] == 1] - repeats_df['type'] = 'repeat' + # removes all the contributors that only have a first contirbution + repeats_df = repeats_df[ + repeats_df['cntrb_id'].isin(repeats_df.loc[driver_df['rank'] == required_contributions]['cntrb_id'])] - repeats_df = filter_out_repeats_without_required_contributions_in_required_time( - repeat_list, repeats_df, required_time, first_list) + repeat_list = repeats_df.loc[driver_df['rank'] == required_contributions]['created_at'].tolist() + first_list = repeats_df.loc[driver_df['rank'] == 1]['created_at'].tolist() - repeats_df = repeats_df.loc[repeats_df['differences'] <= required_time * 86400] + repeats_df = repeats_df.loc[driver_df['rank'] == 1] + repeats_df['type'] = 'repeat' - repeat_cntrb_ids = repeats_df['cntrb_id'].to_list() + repeats_df = filter_out_repeats_without_required_contributions_in_required_time( + repeat_list, repeats_df, required_time, first_list) - drive_by_df = 
driver_df.loc[~driver_df['cntrb_id'].isin(repeat_cntrb_ids)] + repeats_df = repeats_df.loc[repeats_df['differences'] <= required_time * 86400] - drive_by_df = drive_by_df.loc[driver_df['rank'] == 1] - drive_by_df['type'] = 'drive_by' + repeat_cntrb_ids = repeats_df['cntrb_id'].to_list() - return drive_by_df, repeats_df + drive_by_df = driver_df.loc[~driver_df['cntrb_id'].isin(repeat_cntrb_ids)] - def add_caption_to_visualizations(caption, required_contributions, required_time, plot_width): + drive_by_df = drive_by_df.loc[driver_df['rank'] == 1] + drive_by_df['type'] = 'drive_by' - caption_plot = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + return drive_by_df, repeats_df - caption_plot.add_layout(Label( - x=0, - y=160, - x_units='screen', - y_units='screen', - text='{}'.format(caption.format(required_contributions, required_time)), - text_font='times', - text_font_size='15pt', - render_mode='css' - )) - caption_plot.outline_line_color = None +def add_caption_to_visualizations(caption, required_contributions, required_time, plot_width): - return caption_plot + caption_plot = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - def format_new_cntrb_bar_charts(plot, rank, group_by_format_string): + caption_plot.add_layout(Label( + x=0, + y=160, + x_units='screen', + y_units='screen', + text='{}'.format(caption.format(required_contributions, required_time)), + text_font='times', + text_font_size='15pt', + render_mode='css' + )) + caption_plot.outline_line_color = None - plot.xgrid.grid_line_color = None - plot.y_range.start = 0 - plot.axis.minor_tick_line_color = None - plot.outline_line_color = None + return caption_plot - plot.title.align = "center" - plot.title.text_font_size = "18px" +def format_new_cntrb_bar_charts(plot, rank, group_by_format_string): - plot.yaxis.axis_label = 'Second Time Contributors' if rank == 2 else 'New Contributors' - plot.xaxis.axis_label = group_by_format_string + plot.xgrid.grid_line_color = None + plot.y_range.start = 0 + plot.axis.minor_tick_line_color = None + plot.outline_line_color = None - plot.xaxis.axis_label_text_font_size = "18px" - plot.yaxis.axis_label_text_font_size = "16px" + plot.title.align = "center" + plot.title.text_font_size = "18px" - plot.xaxis.major_label_text_font_size = "16px" - plot.xaxis.major_label_orientation = 45.0 + plot.yaxis.axis_label = 'Second Time Contributors' if rank == 2 else 'New Contributors' + plot.xaxis.axis_label = group_by_format_string - plot.yaxis.major_label_text_font_size = "16px" + plot.xaxis.axis_label_text_font_size = "18px" + plot.yaxis.axis_label_text_font_size = "16px" - return plot + plot.xaxis.major_label_text_font_size = "16px" + plot.xaxis.major_label_orientation = 45.0 - def add_charts_and_captions_to_correct_positions(chart_plot, caption_plot, rank, contributor_type, - row_1, row_2, row_3, row_4): + plot.yaxis.major_label_text_font_size = "16px" - if rank == 1 and (contributor_type == 'All' or contributor_type == 'repeat'): - row_1.append(chart_plot) - row_2.append(caption_plot) - elif rank == 2 or contributor_type == 'drive_by': - row_3.append(chart_plot) - row_4.append(caption_plot) + return plot - def get_new_cntrb_bar_chart_query_params(): +def add_charts_and_captions_to_correct_positions(chart_plot, caption_plot, rank, contributor_type, + row_1, row_2, row_3, row_4): - group_by = str(request.args.get('group_by', "quarter")) - required_contributions = int(request.args.get('required_contributions', 4)) - required_time = int(request.args.get('required_time', 365)) + if rank == 1 
and (contributor_type == 'All' or contributor_type == 'repeat'): + row_1.append(chart_plot) + row_2.append(caption_plot) + elif rank == 2 or contributor_type == 'drive_by': + row_3.append(chart_plot) + row_4.append(caption_plot) - return group_by, required_contributions, required_time +def get_new_cntrb_bar_chart_query_params(): - def remove_rows_before_start_date(df, start_date): + group_by = str(request.args.get('group_by', "quarter")) + required_contributions = int(request.args.get('required_contributions', 4)) + required_time = int(request.args.get('required_time', 365)) - mask = (df['yearmonth'] < start_date) - result_df = df[~df['cntrb_id'].isin(df.loc[mask]['cntrb_id'])] + return group_by, required_contributions, required_time - return result_df +def remove_rows_before_start_date(df, start_date): - def remove_rows_with_null_values(df, not_null_columns=[]): - """Remove null data from pandas df + mask = (df['yearmonth'] < start_date) + result_df = df[~df['cntrb_id'].isin(df.loc[mask]['cntrb_id'])] - Parameters - -- df - description: the dataframe that will be modified - type: Pandas Dataframe + return result_df - -- list_of_columns - description: columns that are searched for NULL values - type: list - default: [] (means all columns will be checked for NULL values) - IMPORTANT: if an empty list is passed or nothing is passed it will check all columns for NULL values +def remove_rows_with_null_values(df, not_null_columns=[]): + """Remove null data from pandas df - Return Value - -- Modified Pandas Dataframe - """ + Parameters + -- df + description: the dataframe that will be modified + type: Pandas Dataframe - if len(not_null_columns) == 0: - not_null_columns = df.columns.to_list() + -- list_of_columns + description: columns that are searched for NULL values + type: list + default: [] (means all columns will be checked for NULL values) + IMPORTANT: if an empty list is passed or nothing is passed it will check all columns for NULL values - total_rows_removed = 0 - for col in not_null_columns: - rows_removed = len(df.loc[df[col].isnull() is True]) + Return Value + -- Modified Pandas Dataframe + """ - if rows_removed > 0: - print(f"{rows_removed} rows have been removed because of null values in column {col}") - total_rows_removed += rows_removed + if len(not_null_columns) == 0: + not_null_columns = df.columns.to_list() - df = df.loc[df[col].isnull() is False] + total_rows_removed = 0 + for col in not_null_columns: + rows_removed = len(df.loc[df[col].isnull() is True]) - if total_rows_removed > 0: - print(f"\nTotal rows removed because of null data: {total_rows_removed}"); - else: - print("No null data found") + if rows_removed > 0: + print(f"{rows_removed} rows have been removed because of null values in column {col}") + total_rows_removed += rows_removed - return df + df = df.loc[df[col].isnull() is False] - def get_needed_columns(df, list_of_columns): - """Get only a specific list of columns from a Pandas Dataframe + if total_rows_removed > 0: + print(f"\nTotal rows removed because of null data: {total_rows_removed}"); + else: + print("No null data found") - Parameters - -- df - description: the dataframe that will be modified - type: Pandas Dataframe + return df - -- list_of_columns - description: columns that will be kept in dataframe - type: list +def get_needed_columns(df, list_of_columns): + """Get only a specific list of columns from a Pandas Dataframe - Return Value - -- Modified Pandas Dataframe - """ - return df[list_of_columns] + Parameters + -- df + description: the dataframe 
that will be modified + type: Pandas Dataframe - def filter_data(df, needed_columns, not_null_columns=[]): - """Filters out the unneeded rows in the df, and removed NULL data from df + -- list_of_columns + description: columns that will be kept in dataframe + type: list - Parameters - -- df - description: the dataframe that will be modified - type: Pandas Dataframe + Return Value + -- Modified Pandas Dataframe + """ + return df[list_of_columns] - -- needed_columns - description: the columns to keep in the dataframe +def filter_data(df, needed_columns, not_null_columns=[]): + """Filters out the unneeded rows in the df, and removed NULL data from df - -- not_null_columns - description: columns that will be searched for NULL data, - if NULL values are found those rows will be removed - default: [] (means all columns in needed_columns list will be checked for NULL values) - IMPORTANT: if an empty list is passed or nothing is passed it will check - all columns in needed_columns list for NULL values - Return Value - -- Modified Pandas Dataframe - """ + Parameters + -- df + description: the dataframe that will be modified + type: Pandas Dataframe - if all(x in needed_columns for x in not_null_columns): + -- needed_columns + description: the columns to keep in the dataframe - df = get_needed_columns(df, needed_columns) - #Use the pandas method bc the other method was erroring on boolean index. - #IM - 9/23/22 - df = df.dropna(subset=not_null_columns)#remove_rows_with_null_values(df, not_null_columns) + -- not_null_columns + description: columns that will be searched for NULL data, + if NULL values are found those rows will be removed + default: [] (means all columns in needed_columns list will be checked for NULL values) + IMPORTANT: if an empty list is passed or nothing is passed it will check + all columns in needed_columns list for NULL values + Return Value + -- Modified Pandas Dataframe + """ - return df - else: - print("Developer error, not null columns should be a subset of needed columns") - return df + if all(x in needed_columns for x in not_null_columns): - @server.app.route('/{}/contributor_reports/new_contributors_bar/'.format(AUGUR_API_VERSION), methods=["GET"]) - def new_contributors_bar(): + df = get_needed_columns(df, needed_columns) + #Use the pandas method bc the other method was erroring on boolean index. 
+ #IM - 9/23/22 + df = df.dropna(subset=not_null_columns)#remove_rows_with_null_values(df, not_null_columns) - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + return df + else: + print("Developer error, not null columns should be a subset of needed columns") + return df - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) +@app.route('/{}/contributor_reports/new_contributors_bar/'.format(AUGUR_API_VERSION), methods=["GET"]) +def new_contributors_bar(): - group_by, required_contributions, required_time = get_new_cntrb_bar_chart_query_params() + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) - months_df = months_data_collection(start_date=start_date, end_date=end_date) + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - # TODO remove full_name from data for all charts since it is not needed in vis generation - not_null_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', - 'rank', 'yearmonth', 'new_contributors', 'quarter'] + group_by, required_contributions, required_time = get_new_cntrb_bar_chart_query_params() - #Use the pandas method bc the other method was erroring on boolean index. - #IM - 9/23/22 - input_df = input_df.dropna(subset=not_null_columns)#remove_rows_with_null_values(input_df, not_null_columns) + input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) + months_df = months_data_collection(start_date=start_date, end_date=end_date) - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + # TODO remove full_name from data for all charts since it is not needed in vis generation + not_null_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', + 'rank', 'yearmonth', 'new_contributors', 'quarter'] - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + #Use the pandas method bc the other method was erroring on boolean index. 
+ #IM - 9/23/22 + input_df = input_df.dropna(subset=not_null_columns)#remove_rows_with_null_values(input_df, not_null_columns) - contributor_types = ['All', 'repeat', 'drive_by'] - ranks = [1, 2] + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - row_1, row_2, row_3, row_4 = [], [], [], [] + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} - all_df = remove_rows_before_start_date(input_df, start_date) + contributor_types = ['All', 'repeat', 'drive_by'] + ranks = [1, 2] - drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, - required_time, start_date) + row_1, row_2, row_3, row_4 = [], [], [], [] - for rank in ranks: - for contributor_type in contributor_types: + all_df = remove_rows_before_start_date(input_df, start_date) - # do not display these visualizations since drive-by's do not have second contributions, and the - # second contribution of a repeat contributor is the same thing as the all the second time contributors - if (rank == 2 and contributor_type == 'drive_by') or (rank == 2 and contributor_type == 'repeat'): - continue + drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, + required_time, start_date) - if contributor_type == 'repeat': - driver_df = repeats_df + for rank in ranks: + for contributor_type in contributor_types: - caption = """This graph shows repeat contributors in the specified time period. Repeat contributors - are contributors who have made {} or more contributions in {} days and their first contribution is - in the specified time period. New contributors are individuals who make their first contribution - in the specified time period.""" + # do not display these visualizations since drive-by's do not have second contributions, and the + # second contribution of a repeat contributor is the same thing as the all the second time contributors + if (rank == 2 and contributor_type == 'drive_by') or (rank == 2 and contributor_type == 'repeat'): + continue - elif contributor_type == 'drive_by': + if contributor_type == 'repeat': + driver_df = repeats_df - driver_df = drive_by_df + caption = """This graph shows repeat contributors in the specified time period. Repeat contributors + are contributors who have made {} or more contributions in {} days and their first contribution is + in the specified time period. New contributors are individuals who make their first contribution + in the specified time period.""" - caption = """This graph shows fly by contributors in the specified time period. Fly by contributors - are contributors who make less than the required {} contributions in {} days. New contributors are - individuals who make their first contribution in the specified time period. Of course, then, “All - fly-by’s are by definition first time contributors”. However, not all first time contributors are - fly-by’s.""" + elif contributor_type == 'drive_by': - elif contributor_type == 'All': + driver_df = drive_by_df - if rank == 1: - driver_df = all_df - # makes df with all first time contributors - driver_df = driver_df.loc[driver_df['rank'] == 1] - caption = """This graph shows all the first time contributors, whether they contribute once, or - contribute multiple times. 
New contributors are individuals who make their first contribution - in the specified time period.""" + caption = """This graph shows fly by contributors in the specified time period. Fly by contributors + are contributors who make less than the required {} contributions in {} days. New contributors are + individuals who make their first contribution in the specified time period. Of course, then, “All + fly-by’s are by definition first time contributors”. However, not all first time contributors are + fly-by’s.""" - if rank == 2: + elif contributor_type == 'All': - driver_df = all_df + if rank == 1: + driver_df = all_df + # makes df with all first time contributors + driver_df = driver_df.loc[driver_df['rank'] == 1] + caption = """This graph shows all the first time contributors, whether they contribute once, or + contribute multiple times. New contributors are individuals who make their first contribution + in the specified time period.""" - # creates df with all second time contributors - driver_df = driver_df.loc[driver_df['rank'] == 2] - caption = """This graph shows the second contribution of all - first time contributors in the specified time period.""" - # y_axis_label = 'Second Time Contributors' + if rank == 2: - # filter by end_date, this is not done with the begin date filtering because a repeat contributor - # will look like drive-by if the second contribution is removed by end_date filtering - mask = (driver_df['yearmonth'] < end_date) - driver_df = driver_df.loc[mask] + driver_df = all_df - # adds all months to driver_df so the lists of dates will include all months and years - driver_df = pd.concat([driver_df, months_df]) + # creates df with all second time contributors + driver_df = driver_df.loc[driver_df['rank'] == 2] + caption = """This graph shows the second contribution of all + first time contributors in the specified time period.""" + # y_axis_label = 'Second Time Contributors' - data = pd.DataFrame() - if group_by == 'year': + # filter by end_date, this is not done with the begin date filtering because a repeat contributor + # will look like drive-by if the second contribution is removed by end_date filtering + mask = (driver_df['yearmonth'] < end_date) + driver_df = driver_df.loc[mask] - data['dates'] = driver_df[group_by].unique() + # adds all months to driver_df so the lists of dates will include all months and years + driver_df = pd.concat([driver_df, months_df]) - # new contributor counts for y-axis - data['new_contributor_counts'] = driver_df.groupby([group_by]).sum().reset_index()[ - 'new_contributors'] + data = pd.DataFrame() + if group_by == 'year': - # used to format x-axis and title - group_by_format_string = "Year" + data['dates'] = driver_df[group_by].unique() - elif group_by == 'quarter' or group_by == 'month': + # new contributor counts for y-axis + data['new_contributor_counts'] = driver_df.groupby([group_by]).sum().reset_index()[ + 'new_contributors'] - # set variables to group the data by quarter or month - if group_by == 'quarter': - date_column = 'quarter' - group_by_format_string = "Quarter" + # used to format x-axis and title + group_by_format_string = "Year" - elif group_by == 'month': - date_column = 'yearmonth' - group_by_format_string = "Month" + elif group_by == 'quarter' or group_by == 'month': - # modifies the driver_df[date_column] to be a string with year and month, - # then finds all the unique values - data['dates'] = np.unique(np.datetime_as_string(driver_df[date_column], unit='M')) + # set variables to group the data by quarter or 
month + if group_by == 'quarter': + date_column = 'quarter' + group_by_format_string = "Quarter" - # new contributor counts for y-axis - data['new_contributor_counts'] = driver_df.groupby([date_column]).sum().reset_index()[ - 'new_contributors'] + elif group_by == 'month': + date_column = 'yearmonth' + group_by_format_string = "Month" - # if the data set is large enough it will dynamically assign the width, if the data set is - # too small it will by default set to 870 pixel so the title fits - if len(data['new_contributor_counts']) >= 15: - plot_width = 46 * len(data['new_contributor_counts']) - else: - plot_width = 870 + # modifies the driver_df[date_column] to be a string with year and month, + # then finds all the unique values + data['dates'] = np.unique(np.datetime_as_string(driver_df[date_column], unit='M')) - # create a dict convert an integer number into a word - # used to turn the rank into a word, so it is nicely displayed in the title - numbers = ['Zero', 'First', 'Second'] - num_conversion_dict = {} - for i in range(1, len(numbers)): - num_conversion_dict[i] = numbers[i] - number = '{}'.format(num_conversion_dict[rank]) + # new contributor counts for y-axis + data['new_contributor_counts'] = driver_df.groupby([date_column]).sum().reset_index()[ + 'new_contributors'] - # define pot for bar chart - p = figure(x_range=data['dates'], plot_height=400, plot_width=plot_width, - title="{}: {} {} Time Contributors Per {}".format(repo_dict[repo_id], - contributor_type.capitalize(), number, - group_by_format_string), - y_range=(0, max(data['new_contributor_counts']) * 1.15), margin=(0, 0, 10, 0)) + # if the data set is large enough it will dynamically assign the width, if the data set is + # too small it will by default set to 870 pixel so the title fits + if len(data['new_contributor_counts']) >= 15: + plot_width = 46 * len(data['new_contributor_counts']) + else: + plot_width = 870 - p.vbar(x=data['dates'], top=data['new_contributor_counts'], width=0.8) + # create a dict convert an integer number into a word + # used to turn the rank into a word, so it is nicely displayed in the title + numbers = ['Zero', 'First', 'Second'] + num_conversion_dict = {} + for i in range(1, len(numbers)): + num_conversion_dict[i] = numbers[i] + number = '{}'.format(num_conversion_dict[rank]) - source = ColumnDataSource( - data=dict(dates=data['dates'], new_contributor_counts=data['new_contributor_counts'])) + # define pot for bar chart + p = figure(x_range=data['dates'], plot_height=400, plot_width=plot_width, + title="{}: {} {} Time Contributors Per {}".format(repo_dict[repo_id], + contributor_type.capitalize(), number, + group_by_format_string), + y_range=(0, max(data['new_contributor_counts']) * 1.15), margin=(0, 0, 10, 0)) - # add contributor_count labels to chart - p.add_layout(LabelSet(x='dates', y='new_contributor_counts', text='new_contributor_counts', y_offset=4, - text_font_size="13pt", text_color="black", - source=source, text_align='center')) + p.vbar(x=data['dates'], top=data['new_contributor_counts'], width=0.8) - plot = format_new_cntrb_bar_charts(p, rank, group_by_format_string) + source = ColumnDataSource( + data=dict(dates=data['dates'], new_contributor_counts=data['new_contributor_counts'])) - caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) + # add contributor_count labels to chart + p.add_layout(LabelSet(x='dates', y='new_contributor_counts', text='new_contributor_counts', y_offset=4, + text_font_size="13pt", text_color="black", + 
source=source, text_align='center')) - add_charts_and_captions_to_correct_positions(plot, caption_plot, rank, contributor_type, row_1, - row_2, row_3, row_4) + plot = format_new_cntrb_bar_charts(p, rank, group_by_format_string) - # puts plots together into a grid - grid = gridplot([row_1, row_2, row_3, row_4]) + caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) - filename = export_png(grid) + add_charts_and_captions_to_correct_positions(plot, caption_plot, rank, contributor_type, row_1, + row_2, row_3, row_4) - return send_file(filename) + # puts plots together into a grid + grid = gridplot([row_1, row_2, row_3, row_4]) - @server.app.route('/{}/contributor_reports/new_contributors_stacked_bar/'.format(AUGUR_API_VERSION), - methods=["GET"]) - def new_contributors_stacked_bar(): + filename = export_png(grid) - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + return send_file(filename) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) +@app.route('/{}/contributor_reports/new_contributors_stacked_bar/'.format(AUGUR_API_VERSION), + methods=["GET"]) +def new_contributors_stacked_bar(): - group_by, required_contributions, required_time = get_new_cntrb_bar_chart_query_params() + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) - months_df = months_data_collection(start_date=start_date, end_date=end_date) + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - needed_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', - 'rank', 'yearmonth', 'new_contributors', 'quarter'] + group_by, required_contributions, required_time = get_new_cntrb_bar_chart_query_params() - input_df = filter_data(input_df, needed_columns) + input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) + months_df = months_data_collection(start_date=start_date, end_date=end_date) - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + needed_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', + 'rank', 'yearmonth', 'new_contributors', 'quarter'] - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + input_df = filter_data(input_df, needed_columns) - contributor_types = ['All', 'repeat', 'drive_by'] - ranks = [1, 2] + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - row_1, row_2, row_3, row_4 = [], [], [], [] + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} - all_df = remove_rows_before_start_date(input_df, start_date) + contributor_types = ['All', 'repeat', 'drive_by'] + ranks = [1, 2] - drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, - required_time, start_date) + row_1, row_2, row_3, row_4 = [], [], [], [] - for rank in ranks: - for contributor_type in contributor_types: - # do not display these visualizations since drive-by's do not have second contributions, - # and the second 
contribution of a repeat contributor is the same thing as the all the - # second time contributors - if (rank == 2 and contributor_type == 'drive_by') or (rank == 2 and contributor_type == 'repeat'): - continue + all_df = remove_rows_before_start_date(input_df, start_date) - if contributor_type == 'repeat': - driver_df = repeats_df + drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, + required_time, start_date) - caption = """This graph shows repeat contributors in the specified time period. Repeat contributors - are contributors who have made {} or more contributions in {} days and their first contribution is - in the specified time period. New contributors are individuals who make their first contribution in - the specified time period.""" + for rank in ranks: + for contributor_type in contributor_types: + # do not display these visualizations since drive-by's do not have second contributions, + # and the second contribution of a repeat contributor is the same thing as the all the + # second time contributors + if (rank == 2 and contributor_type == 'drive_by') or (rank == 2 and contributor_type == 'repeat'): + continue - elif contributor_type == 'drive_by': + if contributor_type == 'repeat': + driver_df = repeats_df - driver_df = drive_by_df + caption = """This graph shows repeat contributors in the specified time period. Repeat contributors + are contributors who have made {} or more contributions in {} days and their first contribution is + in the specified time period. New contributors are individuals who make their first contribution in + the specified time period.""" - caption = """This graph shows fly by contributors in the specified time period. Fly by contributors - are contributors who make less than the required {} contributions in {} days. New contributors are - individuals who make their first contribution in the specified time period. Of course, then, “All - fly-by’s are by definition first time contributors”. However, not all first time contributors are - fly-by’s.""" + elif contributor_type == 'drive_by': - elif contributor_type == 'All': - if rank == 1: - driver_df = all_df + driver_df = drive_by_df - # makes df with all first time contributors - driver_df = driver_df.loc[driver_df['rank'] == 1] + caption = """This graph shows fly by contributors in the specified time period. Fly by contributors + are contributors who make less than the required {} contributions in {} days. New contributors are + individuals who make their first contribution in the specified time period. Of course, then, “All + fly-by’s are by definition first time contributors”. However, not all first time contributors are + fly-by’s.""" - caption = """This graph shows all the first time contributors, whether they contribute once, or - contribute multiple times. 
New contributors are individuals who make their first contribution in - the specified time period.""" + elif contributor_type == 'All': + if rank == 1: + driver_df = all_df - if rank == 2: - driver_df = all_df + # makes df with all first time contributors + driver_df = driver_df.loc[driver_df['rank'] == 1] - # creates df with all second time contributor - driver_df = driver_df.loc[driver_df['rank'] == 2] - caption = """This graph shows the second contribution of all first time - contributors in the specified time period.""" - # y_axis_label = 'Second Time Contributors' + caption = """This graph shows all the first time contributors, whether they contribute once, or + contribute multiple times. New contributors are individuals who make their first contribution in + the specified time period.""" - # filter by end_date, this is not done with the begin date filtering because a repeat contributor will - # look like drive-by if the second contribution is removed by end_date filtering - mask = (driver_df['yearmonth'] < end_date) - driver_df = driver_df.loc[mask] + if rank == 2: + driver_df = all_df - # adds all months to driver_df so the lists of dates will include all months and years - driver_df = pd.concat([driver_df, months_df]) + # creates df with all second time contributor + driver_df = driver_df.loc[driver_df['rank'] == 2] + caption = """This graph shows the second contribution of all first time + contributors in the specified time period.""" + # y_axis_label = 'Second Time Contributors' - actions = ['open_pull_request', 'pull_request_comment', 'commit', 'issue_closed', 'issue_opened', - 'issue_comment'] + # filter by end_date, this is not done with the begin date filtering because a repeat contributor will + # look like drive-by if the second contribution is removed by end_date filtering + mask = (driver_df['yearmonth'] < end_date) + driver_df = driver_df.loc[mask] - data = pd.DataFrame() - if group_by == 'year': + # adds all months to driver_df so the lists of dates will include all months and years + driver_df = pd.concat([driver_df, months_df]) - # x-axis dates - data['dates'] = driver_df[group_by].unique() + actions = ['open_pull_request', 'pull_request_comment', 'commit', 'issue_closed', 'issue_opened', + 'issue_comment'] - for contribution_type in actions: - data[contribution_type] = \ - pd.concat([driver_df.loc[driver_df['action'] == contribution_type], months_df]).groupby( - group_by).sum().reset_index()['new_contributors'] + data = pd.DataFrame() + if group_by == 'year': - # new contributor counts for all actions - data['new_contributor_counts'] = driver_df.groupby([group_by]).sum().reset_index()[ - 'new_contributors'] + # x-axis dates + data['dates'] = driver_df[group_by].unique() - # used to format x-axis and graph title - group_by_format_string = "Year" + for contribution_type in actions: + data[contribution_type] = \ + pd.concat([driver_df.loc[driver_df['action'] == contribution_type], months_df]).groupby( + group_by).sum().reset_index()['new_contributors'] - elif group_by == 'quarter' or group_by == 'month': + # new contributor counts for all actions + data['new_contributor_counts'] = driver_df.groupby([group_by]).sum().reset_index()[ + 'new_contributors'] - # set variables to group the data by quarter or month - if group_by == 'quarter': - date_column = 'quarter' - group_by_format_string = "Quarter" + # used to format x-axis and graph title + group_by_format_string = "Year" - elif group_by == 'month': - date_column = 'yearmonth' - group_by_format_string = "Month" + elif 
group_by == 'quarter' or group_by == 'month': - # modifies the driver_df[date_column] to be a string with year and month, - # then finds all the unique values - data['dates'] = np.unique(np.datetime_as_string(driver_df[date_column], unit='M')) + # set variables to group the data by quarter or month + if group_by == 'quarter': + date_column = 'quarter' + group_by_format_string = "Quarter" - # new_contributor counts for each type of action - for contribution_type in actions: - data[contribution_type] = \ - pd.concat([driver_df.loc[driver_df['action'] == contribution_type], months_df]).groupby( - date_column).sum().reset_index()['new_contributors'] + elif group_by == 'month': + date_column = 'yearmonth' + group_by_format_string = "Month" - print(data.to_string()) + # modifies the driver_df[date_column] to be a string with year and month, + # then finds all the unique values + data['dates'] = np.unique(np.datetime_as_string(driver_df[date_column], unit='M')) - # new contributor counts for all actions - data['new_contributor_counts'] = driver_df.groupby([date_column]).sum().reset_index()[ - 'new_contributors'] + # new_contributor counts for each type of action + for contribution_type in actions: + data[contribution_type] = \ + pd.concat([driver_df.loc[driver_df['action'] == contribution_type], months_df]).groupby( + date_column).sum().reset_index()['new_contributors'] - # if the data set is large enough it will dynamically assign the width, if the data set is too small it - # will by default set to 870 pixel so the title fits - if len(data['new_contributor_counts']) >= 15: - plot_width = 46 * len(data['new_contributor_counts']) + 200 - else: - plot_width = 870 + print(data.to_string()) - # create list of values for data source dict - actions_df_references = [] - for action in actions: - actions_df_references.append(data[action]) + # new contributor counts for all actions + data['new_contributor_counts'] = driver_df.groupby([date_column]).sum().reset_index()[ + 'new_contributors'] - # created dict with the actions as the keys, and the values as the values from the df - data_source = {actions[i]: actions_df_references[i] for i in range(len(actions))} - data_source.update({'dates': data['dates'], 'New Contributor Counts': data['new_contributor_counts']}) + # if the data set is large enough it will dynamically assign the width, if the data set is too small it + # will by default set to 870 pixel so the title fits + if len(data['new_contributor_counts']) >= 15: + plot_width = 46 * len(data['new_contributor_counts']) + 200 + else: + plot_width = 870 - colors = Colorblind[len(actions)] + # create list of values for data source dict + actions_df_references = [] + for action in actions: + actions_df_references.append(data[action]) - source = ColumnDataSource(data=data_source) + # created dict with the actions as the keys, and the values as the values from the df + data_source = {actions[i]: actions_df_references[i] for i in range(len(actions))} + data_source.update({'dates': data['dates'], 'New Contributor Counts': data['new_contributor_counts']}) - # create a dict convert an integer number into a word - # used to turn the rank into a word, so it is nicely displayed in the title - numbers = ['Zero', 'First', 'Second'] - num_conversion_dict = {} - for i in range(1, len(numbers)): - num_conversion_dict[i] = numbers[i] - number = '{}'.format(num_conversion_dict[rank]) + colors = Colorblind[len(actions)] - # y_max = 20 - # creates plot to hold chart - p = figure(x_range=data['dates'], plot_height=400, 
plot_width=plot_width, - title='{}: {} {} Time Contributors Per {}'.format(repo_dict[repo_id], - contributor_type.capitalize(), number, - group_by_format_string), - toolbar_location=None, y_range=(0, max(data['new_contributor_counts']) * 1.15)) - # max(data['new_contributor_counts'])* 1.15), margin = (0, 0, 0, 0)) + source = ColumnDataSource(data=data_source) - vbar = p.vbar_stack(actions, x='dates', width=0.8, color=colors, source=source) + # create a dict convert an integer number into a word + # used to turn the rank into a word, so it is nicely displayed in the title + numbers = ['Zero', 'First', 'Second'] + num_conversion_dict = {} + for i in range(1, len(numbers)): + num_conversion_dict[i] = numbers[i] + number = '{}'.format(num_conversion_dict[rank]) - # add total count labels - p.add_layout(LabelSet(x='dates', y='New Contributor Counts', text='New Contributor Counts', y_offset=4, - text_font_size="14pt", - text_color="black", source=source, text_align='center')) + # y_max = 20 + # creates plot to hold chart + p = figure(x_range=data['dates'], plot_height=400, plot_width=plot_width, + title='{}: {} {} Time Contributors Per {}'.format(repo_dict[repo_id], + contributor_type.capitalize(), number, + group_by_format_string), + toolbar_location=None, y_range=(0, max(data['new_contributor_counts']) * 1.15)) + # max(data['new_contributor_counts'])* 1.15), margin = (0, 0, 0, 0)) - # add legend - legend = Legend(items=[(date, [action]) for (date, action) in zip(actions, vbar)], location=(0, 120), - label_text_font_size="16px") - p.add_layout(legend, 'right') + vbar = p.vbar_stack(actions, x='dates', width=0.8, color=colors, source=source) - plot = format_new_cntrb_bar_charts(p, rank, group_by_format_string) + # add total count labels + p.add_layout(LabelSet(x='dates', y='New Contributor Counts', text='New Contributor Counts', y_offset=4, + text_font_size="14pt", + text_color="black", source=source, text_align='center')) - caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) + # add legend + legend = Legend(items=[(date, [action]) for (date, action) in zip(actions, vbar)], location=(0, 120), + label_text_font_size="16px") + p.add_layout(legend, 'right') - add_charts_and_captions_to_correct_positions(plot, caption_plot, rank, contributor_type, row_1, - row_2, row_3, row_4) + plot = format_new_cntrb_bar_charts(p, rank, group_by_format_string) - # puts plots together into a grid - grid = gridplot([row_1, row_2, row_3, row_4]) + caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) - filename = export_png(grid) + add_charts_and_captions_to_correct_positions(plot, caption_plot, rank, contributor_type, row_1, + row_2, row_3, row_4) - return send_file(filename) + # puts plots together into a grid + grid = gridplot([row_1, row_2, row_3, row_4]) - @server.app.route('/{}/contributor_reports/returning_contributors_pie_chart/'.format(AUGUR_API_VERSION), - methods=["GET"]) - def returning_contributors_pie_chart(): + filename = export_png(grid) - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + return send_file(filename) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) +@app.route('/{}/contributor_reports/returning_contributors_pie_chart/'.format(AUGUR_API_VERSION), + methods=["GET"]) +def returning_contributors_pie_chart(): - required_contributions = int(request.args.get('required_contributions', 4)) - 
required_time = int(request.args.get('required_time', 365)) + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - needed_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', - 'rank', 'yearmonth', 'new_contributors', 'quarter'] + required_contributions = int(request.args.get('required_contributions', 4)) + required_time = int(request.args.get('required_time', 365)) - input_df = filter_data(input_df, needed_columns) + input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + needed_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', + 'rank', 'yearmonth', 'new_contributors', 'quarter'] - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + input_df = filter_data(input_df, needed_columns) - drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, - required_time, start_date) + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - print(repeats_df.to_string()) + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} - driver_df = pd.concat([drive_by_df, repeats_df]) + drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, + required_time, start_date) - # filter df by end date - mask = (driver_df['yearmonth'] < end_date) - driver_df = driver_df.loc[mask] + print(repeats_df.to_string()) - # first and second time contributor counts - drive_by_contributors = driver_df.loc[driver_df['type'] == 'drive_by'].count()['new_contributors'] - repeat_contributors = driver_df.loc[driver_df['type'] == 'repeat'].count()['new_contributors'] + driver_df = pd.concat([drive_by_df, repeats_df]) - # create a dict with the # of drive-by and repeat contributors - x = {'Drive_By': drive_by_contributors, - 'Repeat': repeat_contributors} + # filter df by end date + mask = (driver_df['yearmonth'] < end_date) + driver_df = driver_df.loc[mask] - # turn dict 'x' into a dataframe with columns 'contributor_type', and 'counts' - data = pd.Series(x).reset_index(name='counts').rename(columns={'index': 'contributor_type'}) + # first and second time contributor counts + drive_by_contributors = driver_df.loc[driver_df['type'] == 'drive_by'].count()['new_contributors'] + repeat_contributors = driver_df.loc[driver_df['type'] == 'repeat'].count()['new_contributors'] - data['angle'] = data['counts'] / data['counts'].sum() * 2 * pi - data['color'] = ('#0072B2', '#E69F00') - data['percentage'] = ((data['angle'] / (2 * pi)) * 100).round(2) + # create a dict with the # of drive-by and repeat contributors + x = {'Drive_By': drive_by_contributors, + 'Repeat': repeat_contributors} - # format title - title = "{}: Number of Returning " \ - "Contributors out of {} from {} to {}" \ - .format(repo_dict[repo_id], drive_by_contributors + repeat_contributors, start_date, end_date) + # turn dict 'x' into a dataframe with 
columns 'contributor_type', and 'counts' + data = pd.Series(x).reset_index(name='counts').rename(columns={'index': 'contributor_type'}) - title_text_font_size = 18 + data['angle'] = data['counts'] / data['counts'].sum() * 2 * pi + data['color'] = ('#0072B2', '#E69F00') + data['percentage'] = ((data['angle'] / (2 * pi)) * 100).round(2) - plot_width = 850 + # format title + title = "{}: Number of Returning " \ + "Contributors out of {} from {} to {}" \ + .format(repo_dict[repo_id], drive_by_contributors + repeat_contributors, start_date, end_date) - # sets plot_width to width of title if title is wider than 850 pixels - if len(title) * title_text_font_size / 2 > plot_width: - plot_width = int(len(title) * title_text_font_size / 2) + title_text_font_size = 18 - # creates plot for chart - p = figure(plot_height=450, plot_width=plot_width, title=title, - toolbar_location=None, x_range=(-0.5, 1.3), tools='hover', tooltips="@contributor_type", - margin=(0, 0, 0, 0)) + plot_width = 850 - p.wedge(x=0.87, y=1, radius=0.4, start_angle=cumsum('angle', include_zero=True), - end_angle=cumsum('angle'), line_color=None, fill_color='color', - legend_field='contributor_type', source=data) + # sets plot_width to width of title if title is wider than 850 pixels + if len(title) * title_text_font_size / 2 > plot_width: + plot_width = int(len(title) * title_text_font_size / 2) - start_point = 0.88 - for i in range(0, len(data['percentage'])): - # percentages - p.add_layout(Label(x=-0.17, y=start_point + 0.13 * (len(data['percentage']) - 1 - i), - text='{}%'.format(data.iloc[i]['percentage']), - render_mode='css', text_font_size='15px', text_font_style='bold')) + # creates plot for chart + p = figure(plot_height=450, plot_width=plot_width, title=title, + toolbar_location=None, x_range=(-0.5, 1.3), tools='hover', tooltips="@contributor_type", + margin=(0, 0, 0, 0)) - # contributors - p.add_layout(Label(x=0.12, y=start_point + 0.13 * (len(data['percentage']) - 1 - i), - text='{}'.format(data.iloc[i]['counts']), - render_mode='css', text_font_size='15px', text_font_style='bold')) + p.wedge(x=0.87, y=1, radius=0.4, start_angle=cumsum('angle', include_zero=True), + end_angle=cumsum('angle'), line_color=None, fill_color='color', + legend_field='contributor_type', source=data) - # percentages header - p.add_layout( - Label(x=-0.22, y=start_point + 0.13 * (len(data['percentage'])), text='Percentages', render_mode='css', - text_font_size='15px', text_font_style='bold')) + start_point = 0.88 + for i in range(0, len(data['percentage'])): + # percentages + p.add_layout(Label(x=-0.17, y=start_point + 0.13 * (len(data['percentage']) - 1 - i), + text='{}%'.format(data.iloc[i]['percentage']), + render_mode='css', text_font_size='15px', text_font_style='bold')) - # legend header - p.add_layout( - Label(x=-0.43, y=start_point + 0.13 * (len(data['percentage'])), text='Category', render_mode='css', - text_font_size='15px', text_font_style='bold')) + # contributors + p.add_layout(Label(x=0.12, y=start_point + 0.13 * (len(data['percentage']) - 1 - i), + text='{}'.format(data.iloc[i]['counts']), + render_mode='css', text_font_size='15px', text_font_style='bold')) - # contributors header - p.add_layout( - Label(x=0, y=start_point + 0.13 * (len(data['percentage'])), text='# Contributors', render_mode='css', - text_font_size='15px', text_font_style='bold')) + # percentages header + p.add_layout( + Label(x=-0.22, y=start_point + 0.13 * (len(data['percentage'])), text='Percentages', render_mode='css', + text_font_size='15px', 
text_font_style='bold')) - p.axis.axis_label = None - p.axis.visible = False - p.grid.grid_line_color = None + # legend header + p.add_layout( + Label(x=-0.43, y=start_point + 0.13 * (len(data['percentage'])), text='Category', render_mode='css', + text_font_size='15px', text_font_style='bold')) - p.title.align = "center" - p.title.text_font_size = "{}px".format(title_text_font_size) + # contributors header + p.add_layout( + Label(x=0, y=start_point + 0.13 * (len(data['percentage'])), text='# Contributors', render_mode='css', + text_font_size='15px', text_font_style='bold')) - p.legend.location = "center_left" - p.legend.border_line_color = None - p.legend.label_text_font_style = 'bold' - p.legend.label_text_font_size = "15px" + p.axis.axis_label = None + p.axis.visible = False + p.grid.grid_line_color = None - plot = p + p.title.align = "center" + p.title.text_font_size = "{}px".format(title_text_font_size) - caption = """This pie chart shows the percentage of new contributors who were fly-by or repeat contributors. - Fly by contributors are contributors who make less than the required {0} contributions in {1} days. - New contributors are individuals who make their first contribution in the specified time period. - Repeat contributors are contributors who have made {0} or more contributions in {1} days and their - first contribution is in the specified time period.""" + p.legend.location = "center_left" + p.legend.border_line_color = None + p.legend.label_text_font_style = 'bold' + p.legend.label_text_font_size = "15px" - caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) + plot = p - # put graph and caption plot together into one grid - grid = gridplot([[plot], [caption_plot]]) + caption = """This pie chart shows the percentage of new contributors who were fly-by or repeat contributors. + Fly by contributors are contributors who make less than the required {0} contributions in {1} days. + New contributors are individuals who make their first contribution in the specified time period. 
+ Repeat contributors are contributors who have made {0} or more contributions in {1} days and their + first contribution is in the specified time period.""" - filename = export_png(grid) + caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) - return send_file(filename) + # put graph and caption plot together into one grid + grid = gridplot([[plot], [caption_plot]]) - @server.app.route('/{}/contributor_reports/returning_contributors_stacked_bar/'.format(AUGUR_API_VERSION), - methods=["GET"]) - def returning_contributors_stacked_bar(): + filename = export_png(grid) - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + return send_file(filename) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) +@app.route('/{}/contributor_reports/returning_contributors_stacked_bar/'.format(AUGUR_API_VERSION), + methods=["GET"]) +def returning_contributors_stacked_bar(): - group_by = str(request.args.get('group_by', "quarter")) - required_contributions = int(request.args.get('required_contributions', 4)) - required_time = int(request.args.get('required_time', 365)) + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) - months_df = months_data_collection(start_date=start_date, end_date=end_date) + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - needed_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', - 'rank', 'yearmonth', 'new_contributors', 'quarter'] + group_by = str(request.args.get('group_by', "quarter")) + required_contributions = int(request.args.get('required_contributions', 4)) + required_time = int(request.args.get('required_time', 365)) - input_df = filter_data(input_df, needed_columns) + input_df = new_contributor_data_collection(repo_id=repo_id, required_contributions=required_contributions) + months_df = months_data_collection(start_date=start_date, end_date=end_date) - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + needed_columns = ['cntrb_id', 'created_at', 'month', 'year', 'repo_id', 'repo_name', 'login', 'action', + 'rank', 'yearmonth', 'new_contributors', 'quarter'] - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + input_df = filter_data(input_df, needed_columns) - drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, - required_time, start_date) + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - driver_df = pd.concat([drive_by_df, repeats_df, months_df]) + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} - # filter by end_date - mask = (driver_df['yearmonth'] < end_date) - driver_df = driver_df.loc[mask] + drive_by_df, repeats_df = compute_fly_by_and_returning_contributors_dfs(input_df, required_contributions, + required_time, start_date) - # create df to hold data needed for chart - data = pd.DataFrame() - if group_by == 'year': + driver_df = pd.concat([drive_by_df, repeats_df, months_df]) - # x-axis dates - data['dates'] = 
driver_df[group_by].unique() + # filter by end_date + mask = (driver_df['yearmonth'] < end_date) + driver_df = driver_df.loc[mask] - data['repeat_counts'] = \ - driver_df.loc[driver_df['type'] == 'repeat'].groupby(group_by).count().reset_index()['new_contributors'] - data['drive_by_counts'] = \ - driver_df.loc[driver_df['type'] == 'drive_by'].groupby(group_by).count().reset_index()[ - 'new_contributors'] + # create df to hold data needed for chart + data = pd.DataFrame() + if group_by == 'year': - # new contributor counts for all contributor counts - total_counts = [] - for i in range(0, len(data['drive_by_counts'])): - total_counts.append(data.iloc[i]['drive_by_counts'] + data.iloc[i]['repeat_counts']) - data['total_counts'] = total_counts + # x-axis dates + data['dates'] = driver_df[group_by].unique() - # used to format x-axis and graph title - group_by_format_string = "Year" + data['repeat_counts'] = \ + driver_df.loc[driver_df['type'] == 'repeat'].groupby(group_by).count().reset_index()['new_contributors'] + data['drive_by_counts'] = \ + driver_df.loc[driver_df['type'] == 'drive_by'].groupby(group_by).count().reset_index()[ + 'new_contributors'] - # font size of drive by and repeat labels - label_text_font_size = "14pt" + # new contributor counts for all contributor counts + total_counts = [] + for i in range(0, len(data['drive_by_counts'])): + total_counts.append(data.iloc[i]['drive_by_counts'] + data.iloc[i]['repeat_counts']) + data['total_counts'] = total_counts - elif group_by == 'quarter' or group_by == 'month': + # used to format x-axis and graph title + group_by_format_string = "Year" - # set variables to group the data by quarter or month - if group_by == 'quarter': - date_column = 'quarter' - group_by_format_string = "Quarter" + # font size of drive by and repeat labels + label_text_font_size = "14pt" - elif group_by == 'month': - date_column = 'yearmonth' - group_by_format_string = "Month" + elif group_by == 'quarter' or group_by == 'month': - # modifies the driver_df[date_column] to be a string with year and month, then finds all the unique values - data['dates'] = np.unique(np.datetime_as_string(driver_df[date_column], unit='M')) - data['drive_by_counts'] = pd.concat([driver_df.loc[driver_df['type'] == 'drive_by'], months_df]).groupby( - date_column).sum().reset_index()['new_contributors'] - data['repeat_counts'] = pd.concat([driver_df.loc[driver_df['type'] == 'repeat'], months_df]).groupby( - date_column).sum().reset_index()['new_contributors'] + # set variables to group the data by quarter or month + if group_by == 'quarter': + date_column = 'quarter' + group_by_format_string = "Quarter" - # new contributor counts for all contributor types - total_counts = [] - for i in range(0, len(data['drive_by_counts'])): - total_counts.append(data.iloc[i]['drive_by_counts'] + data.iloc[i]['repeat_counts']) - data['total_counts'] = total_counts + elif group_by == 'month': + date_column = 'yearmonth' + group_by_format_string = "Month" - # font size of drive by and repeat labels - label_text_font_size = "13pt" + # modifies the driver_df[date_column] to be a string with year and month, then finds all the unique values + data['dates'] = np.unique(np.datetime_as_string(driver_df[date_column], unit='M')) + data['drive_by_counts'] = pd.concat([driver_df.loc[driver_df['type'] == 'drive_by'], months_df]).groupby( + date_column).sum().reset_index()['new_contributors'] + data['repeat_counts'] = pd.concat([driver_df.loc[driver_df['type'] == 'repeat'], months_df]).groupby( + 
date_column).sum().reset_index()['new_contributors'] - data_source = {'Dates': data['dates'], - 'Fly By': data['drive_by_counts'], - 'Repeat': data['repeat_counts'], - 'All': data['total_counts']} + # new contributor counts for all contributor types + total_counts = [] + for i in range(0, len(data['drive_by_counts'])): + total_counts.append(data.iloc[i]['drive_by_counts'] + data.iloc[i]['repeat_counts']) + data['total_counts'] = total_counts - groups = ["Fly By", "Repeat"] + # font size of drive by and repeat labels + label_text_font_size = "13pt" - colors = ['#56B4E9', '#E69F00'] + data_source = {'Dates': data['dates'], + 'Fly By': data['drive_by_counts'], + 'Repeat': data['repeat_counts'], + 'All': data['total_counts']} - source = ColumnDataSource(data=data_source) + groups = ["Fly By", "Repeat"] - # format title - title_text_font_size = 18 + colors = ['#56B4E9', '#E69F00'] - # if the data set is large enough it will dynamically assign the width, if the data set - # is too small it will by default set to 780 pixel so the title fits - if len(data['total_counts']) >= 13: - plot_width = 46 * len(data['total_counts']) + 210 - else: - plot_width = 780 + source = ColumnDataSource(data=data_source) + + # format title + title_text_font_size = 18 + + # if the data set is large enough it will dynamically assign the width, if the data set + # is too small it will by default set to 780 pixel so the title fits + if len(data['total_counts']) >= 13: + plot_width = 46 * len(data['total_counts']) + 210 + else: + plot_width = 780 - p = figure(x_range=data['dates'], plot_height=500, plot_width=plot_width, - title="{}: Fly By and Repeat Contributor Counts per {}".format(repo_dict[repo_id], - group_by_format_string), - toolbar_location=None, y_range=(0, max(total_counts) * 1.15), margin=(0, 0, 0, 0)) + p = figure(x_range=data['dates'], plot_height=500, plot_width=plot_width, + title="{}: Fly By and Repeat Contributor Counts per {}".format(repo_dict[repo_id], + group_by_format_string), + toolbar_location=None, y_range=(0, max(total_counts) * 1.15), margin=(0, 0, 0, 0)) - vbar = p.vbar_stack(groups, x='Dates', width=0.8, color=colors, source=source) + vbar = p.vbar_stack(groups, x='Dates', width=0.8, color=colors, source=source) - # add total counts above bars - p.add_layout(LabelSet(x='Dates', y='All', text='All', y_offset=8, text_font_size="14pt", - text_color="black", source=source, text_align='center')) + # add total counts above bars + p.add_layout(LabelSet(x='Dates', y='All', text='All', y_offset=8, text_font_size="14pt", + text_color="black", source=source, text_align='center')) - # add drive by count labels - p.add_layout(LabelSet(x='Dates', y='Fly By', text='Fly By', y_offset=-22, text_font_size=label_text_font_size, - text_color="black", source=source, text_align='center')) + # add drive by count labels + p.add_layout(LabelSet(x='Dates', y='Fly By', text='Fly By', y_offset=-22, text_font_size=label_text_font_size, + text_color="black", source=source, text_align='center')) - # add repeat count labels - p.add_layout(LabelSet(x='Dates', y='All', text='Repeat', y_offset=-22, text_font_size=label_text_font_size, - text_color="black", source=source, text_align='center')) + # add repeat count labels + p.add_layout(LabelSet(x='Dates', y='All', text='Repeat', y_offset=-22, text_font_size=label_text_font_size, + text_color="black", source=source, text_align='center')) - # add legend - legend = Legend(items=[(date, [group]) for (date, group) in zip(groups, vbar)], location=(0, 200), - 
label_text_font_size="16px") - p.add_layout(legend, 'right') + # add legend + legend = Legend(items=[(date, [group]) for (date, group) in zip(groups, vbar)], location=(0, 200), + label_text_font_size="16px") + p.add_layout(legend, 'right') - p.xgrid.grid_line_color = None - p.y_range.start = 0 - p.axis.minor_tick_line_color = None - p.outline_line_color = None + p.xgrid.grid_line_color = None + p.y_range.start = 0 + p.axis.minor_tick_line_color = None + p.outline_line_color = None - p.title.align = "center" - p.title.text_font_size = "{}px".format(title_text_font_size) + p.title.align = "center" + p.title.text_font_size = "{}px".format(title_text_font_size) - p.yaxis.axis_label = '# Contributors' - p.xaxis.axis_label = group_by_format_string + p.yaxis.axis_label = '# Contributors' + p.xaxis.axis_label = group_by_format_string - p.xaxis.axis_label_text_font_size = "18px" - p.yaxis.axis_label_text_font_size = "16px" + p.xaxis.axis_label_text_font_size = "18px" + p.yaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "16px" - p.xaxis.major_label_orientation = 45.0 + p.xaxis.major_label_text_font_size = "16px" + p.xaxis.major_label_orientation = 45.0 - p.yaxis.major_label_text_font_size = "16px" + p.yaxis.major_label_text_font_size = "16px" - p.legend.label_text_font_size = "20px" + p.legend.label_text_font_size = "20px" - plot = p + plot = p - caption = """This graph shows the number of new contributors in the specified time period, and indicates how - many were fly-by and repeat contributors. Fly by contributors are contributors who make less than the required - {0} contributions in {1} days. New contributors are individuals who make their first contribution in the - specified time period. Repeat contributors are contributors who have made {0} or more contributions in {1} - days and their first contribution is in the specified time period.""" + caption = """This graph shows the number of new contributors in the specified time period, and indicates how + many were fly-by and repeat contributors. Fly by contributors are contributors who make less than the required + {0} contributions in {1} days. New contributors are individuals who make their first contribution in the + specified time period. 
Repeat contributors are contributors who have made {0} or more contributions in {1} + days and their first contribution is in the specified time period.""" - caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) + caption_plot = add_caption_to_visualizations(caption, required_contributions, required_time, plot_width) - # put graph and caption plot together into one grid - grid = gridplot([[plot], [caption_plot]]) + # put graph and caption plot together into one grid + grid = gridplot([[plot], [caption_plot]]) - filename = export_png(grid) + filename = export_png(grid) - return send_file(filename) + return send_file(filename) diff --git a/augur/api/routes/manager.py b/augur/api/routes/manager.py index fcb5524663..cb886b1831 100755 --- a/augur/api/routes/manager.py +++ b/augur/api/routes/manager.py @@ -18,188 +18,187 @@ import traceback from augur.api.routes import AUGUR_API_VERSION +from ..server import app logger = logging.getLogger(__name__) -def create_routes(server): - pass -# @server.app.route('/{}/add-repos'.format(AUGUR_API_VERSION), methods=['POST']) -# def add_repos(): -# """ returns list of successfully inserted repos and repos that caused an error -# adds repos belonging to any user or group to an existing augur repo group -# 'repos' are in the form org/repo, user/repo, or maybe even a full url -# """ -# if authenticate_request(server.augur_app, request): -# group = request.json['group'] -# repo_manager = Repo_insertion_manager(group, engine) -# group_id = repo_manager.get_org_id() -# errors = {} -# errors['invalid_inputs'] = [] -# errors['failed_records'] = [] -# success = [] -# repos = request.json['repos'] -# for repo in repos: -# url = Git_string(repo) -# url.clean_full_string() -# try: #need to test because we require org/repo or full git url -# url.is_repo() -# repo_name = url.get_repo_name() -# repo_parent = url.get_repo_organization() -# except ValueError: -# errors['invalid_inputs'].append(repo) -# else: -# try: -# repo_id = repo_manager.insert_repo(group_id, repo_parent, repo_name) -# except exc.SQLAlchemyError: -# errors['failed_records'].append(repo_name) -# else: -# success.append(get_inserted_repo(group_id, repo_id, repo_name, group, repo_manager.github_urlify(repo_parent, repo_name))) +# @app.route('/{}/add-repos'.format(AUGUR_API_VERSION), methods=['POST']) +# def add_repos(): +# """ returns list of successfully inserted repos and repos that caused an error +# adds repos belonging to any user or group to an existing augur repo group +# 'repos' are in the form org/repo, user/repo, or maybe even a full url +# """ +# if authenticate_request(server.augur_app, request): +# group = request.json['group'] +# repo_manager = Repo_insertion_manager(group, engine) +# group_id = repo_manager.get_org_id() +# errors = {} +# errors['invalid_inputs'] = [] +# errors['failed_records'] = [] +# success = [] +# repos = request.json['repos'] +# for repo in repos: +# url = Git_string(repo) +# url.clean_full_string() +# try: #need to test because we require org/repo or full git url +# url.is_repo() +# repo_name = url.get_repo_name() +# repo_parent = url.get_repo_organization() +# except ValueError: +# errors['invalid_inputs'].append(repo) +# else: +# try: +# repo_id = repo_manager.insert_repo(group_id, repo_parent, repo_name) +# except exc.SQLAlchemyError: +# errors['failed_records'].append(repo_name) +# else: +# success.append(get_inserted_repo(group_id, repo_id, repo_name, group, repo_manager.github_urlify(repo_parent, repo_name))) -# 
status_code = 200 -# summary = {'repos_inserted': success, 'repos_not_inserted': errors} -# summary = json.dumps(summary) -# else: -# status_code = 401 -# summary = json.dumps({'error': "Augur API key is either missing or invalid"}) - -# return Response(response=summary, -# status=status_code, -# mimetype="application/json") - -# @server.app.route('/{}/create-repo-group'.format(AUGUR_API_VERSION), methods=['POST']) -# def create_repo_group(): -# if authenticate_request(server.augur_app, request): -# group = request.json['group'] -# repo_manager = Repo_insertion_manager(group, engine) -# summary = {} -# summary['errors'] = [] -# summary['repo_groups_created'] = [] - -# if group == '': -# summary['errors'].append("invalid group name") -# return Response(response=summary, status=200, mimetype="application/json") - +# status_code = 200 +# summary = {'repos_inserted': success, 'repos_not_inserted': errors} +# summary = json.dumps(summary) +# else: +# status_code = 401 +# summary = json.dumps({'error': "Augur API key is either missing or invalid"}) + +# return Response(response=summary, +# status=status_code, +# mimetype="application/json") + +# @app.route('/{}/create-repo-group'.format(AUGUR_API_VERSION), methods=['POST']) +# def create_repo_group(): +# if authenticate_request(server.augur_app, request): +# group = request.json['group'] +# repo_manager = Repo_insertion_manager(group, engine) +# summary = {} +# summary['errors'] = [] +# summary['repo_groups_created'] = [] + +# if group == '': +# summary['errors'].append("invalid group name") +# return Response(response=summary, status=200, mimetype="application/json") + +# try: +# group_id = repo_manager.get_org_id() +# except TypeError: # try: -# group_id = repo_manager.get_org_id() +# group_id = repo_manager.insert_repo_group() # except TypeError: +# summary['errors'].append("couldn't create group") +# else: +# summary['repo_groups_created'].append({"repo_group_id": group_id, "rg_name": group}) +# else: +# summary['errors'].append("group already exists") + +# summary = json.dumps(summary) +# status_code = 200 +# else: +# status_code = 401 +# summary = json.dumps({'error': "Augur API key is either missing or invalid"}) + +# return Response(response=summary, +# status=status_code, +# mimetype="application/json") + +# @app.route('/{}/import-org'.format(AUGUR_API_VERSION), methods=['POST']) +# def add_repo_group(): +# """ creates a new augur repo group and adds to it the given organization or user's repos +# takes an organization or user name +# """ +# if authenticate_request(server.augur_app, request): +# group = request.json['org'] +# repo_manager = Repo_insertion_manager(group, engine) +# summary = {} +# summary['group_errors'] = [] +# summary['failed_repo_records'] = [] +# summary['repo_records_created'] = [] +# group_exists = False +# try: +# #look for group in augur db +# group_id = repo_manager.get_org_id() +# except TypeError: +# #look for group on github +# if repo_manager.group_exists_gh(): # try: # group_id = repo_manager.insert_repo_group() # except TypeError: -# summary['errors'].append("couldn't create group") -# else: -# summary['repo_groups_created'].append({"repo_group_id": group_id, "rg_name": group}) +# summary['group_errors'].append("failed to create group") +# else: +# group_exists = True # else: -# summary['errors'].append("group already exists") - -# summary = json.dumps(summary) -# status_code = 200 +# summary['group_errors'].append("could not locate group in database or on github") # else: -# status_code = 401 -# summary = 
json.dumps({'error': "Augur API key is either missing or invalid"}) - -# return Response(response=summary, -# status=status_code, -# mimetype="application/json") - -# @server.app.route('/{}/import-org'.format(AUGUR_API_VERSION), methods=['POST']) -# def add_repo_group(): -# """ creates a new augur repo group and adds to it the given organization or user's repos -# takes an organization or user name -# """ -# if authenticate_request(server.augur_app, request): -# group = request.json['org'] -# repo_manager = Repo_insertion_manager(group, engine) -# summary = {} -# summary['group_errors'] = [] -# summary['failed_repo_records'] = [] -# summary['repo_records_created'] = [] -# group_exists = False +# group_exists = True + +# if group_exists: +# summary['group_id'] = str(group_id) +# summary['rg_name'] = group # try: -# #look for group in augur db -# group_id = repo_manager.get_org_id() -# except TypeError: -# #look for group on github -# if repo_manager.group_exists_gh(): +# repos_gh = repo_manager.fetch_repos() +# repos_in_augur = repo_manager.get_existing_repos(group_id) +# repos_db_set = set() +# for name in repos_in_augur: +# #repo_git is more reliable than repo name, so we'll just grab everything after the last slash +# name = (name['repo_git'].rsplit('/', 1)[1]) +# repos_db_set.add(name) +# repos_to_insert = set(repos_gh) - repos_db_set + +# for repo in repos_to_insert: # try: -# group_id = repo_manager.insert_repo_group() -# except TypeError: -# summary['group_errors'].append("failed to create group") +# repo_id = repo_manager.insert_repo(group_id, group, repo) +# except exc.SQLAlchemyError: +# summary['failed_repo_records'].append(repo) # else: -# group_exists = True -# else: -# summary['group_errors'].append("could not locate group in database or on github") -# else: -# group_exists = True - -# if group_exists: -# summary['group_id'] = str(group_id) -# summary['rg_name'] = group -# try: -# repos_gh = repo_manager.fetch_repos() -# repos_in_augur = repo_manager.get_existing_repos(group_id) -# repos_db_set = set() -# for name in repos_in_augur: -# #repo_git is more reliable than repo name, so we'll just grab everything after the last slash -# name = (name['repo_git'].rsplit('/', 1)[1]) -# repos_db_set.add(name) -# repos_to_insert = set(repos_gh) - repos_db_set - -# for repo in repos_to_insert: -# try: -# repo_id = repo_manager.insert_repo(group_id, group, repo) -# except exc.SQLAlchemyError: -# summary['failed_repo_records'].append(repo) -# else: -# summary['repo_records_created'].append(get_inserted_repo(group_id, repo_id, repo, group, repo_manager.github_urlify(group, repo))) -# except requests.ConnectionError: -# summary['group_errors'] = "failed to find the group's child repos" -# logger.debug(f'Error is: {e}.') -# except Exception as e: -# logger.debug(f'Error is: {e}.') - -# status_code = 200 -# summary = json.dumps(summary) -# else: -# status_code = 401 -# summary = json.dumps({'error': "Augur API key is either missing or invalid"}) - -# return Response(response=summary, -# status=status_code, -# mimetype="application/json") - -# def get_inserted_repo(groupid, repoid, reponame, groupname, url): -# inserted_repo={} -# inserted_repo['repo_group_id'] = str(groupid) -# inserted_repo['repo_id'] = str(repoid) -# inserted_repo['repo_name'] = reponame -# inserted_repo['rg_name'] = groupname -# inserted_repo['url'] = url -# return inserted_repo +# summary['repo_records_created'].append(get_inserted_repo(group_id, repo_id, repo, group, repo_manager.github_urlify(group, repo))) +# except 
requests.ConnectionError: +# summary['group_errors'] = "failed to find the group's child repos" +# logger.debug(f'Error is: {e}.') +# except Exception as e: +# logger.debug(f'Error is: {e}.') + +# status_code = 200 +# summary = json.dumps(summary) +# else: +# status_code = 401 +# summary = json.dumps({'error': "Augur API key is either missing or invalid"}) + +# return Response(response=summary, +# status=status_code, +# mimetype="application/json") + +# def get_inserted_repo(groupid, repoid, reponame, groupname, url): +# inserted_repo={} +# inserted_repo['repo_group_id'] = str(groupid) +# inserted_repo['repo_id'] = str(repoid) +# inserted_repo['repo_name'] = reponame +# inserted_repo['rg_name'] = groupname +# inserted_repo['url'] = url +# return inserted_repo # class Repo_insertion_manager(): -# ROOT_AUGUR_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - -# def __init__(self, organization_name, database_connection): -# #self.initialize_logging() -# self.org = organization_name -# self.db = database_connection -# ## added for keys -# self._root_augur_dir = Repo_insertion_manager.ROOT_AUGUR_DIR -# self.augur_config = AugurConfig(self._root_augur_dir) -# ########## - - -# def get_existing_repos(self, group_id): -# """returns repos belonging to repogroup in augur db""" -# select_repos_query = s.sql.text(""" -# SELECT repo_git from augur_data.repo -# WHERE repo_group_id = :repo_group_id -# """) -# select_repos_query = select_repos_query.bindparams(repo_group_id = group_id) -# result = self.db.execute(select_repos_query) -# return result.fetchall() +# ROOT_AUGUR_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + +# def __init__(self, organization_name, database_connection): +# #self.initialize_logging() +# self.org = organization_name +# self.db = database_connection +# ## added for keys +# self._root_augur_dir = Repo_insertion_manager.ROOT_AUGUR_DIR +# self.augur_config = AugurConfig(self._root_augur_dir) +# ########## + + +# def get_existing_repos(self, group_id): +# """returns repos belonging to repogroup in augur db""" +# select_repos_query = s.sql.text(""" +# SELECT repo_git from augur_data.repo +# WHERE repo_group_id = :repo_group_id +# """) +# select_repos_query = select_repos_query.bindparams(repo_group_id = group_id) +# result = self.db.execute(select_repos_query) +# return result.fetchall() # ## This doesn't permit importing of an individual's repo, as they don't show up under "orgs" # # def group_exists_gh(self): @@ -212,98 +211,98 @@ def create_routes(server): # # return True # ## Revised Version of Method -# def group_exists_gh(self): -# url = url = "https://api.github.com/orgs/{}".format(self.org) -# ## attempting to add key due to rate limiting -# gh_api_key = self.augur_config.get_value('Database', 'key') -# self.headers = {'Authorization': 'token %s' % gh_api_key} -# #r = requests.get(url=cntrb_url, headers=self.headers) +# def group_exists_gh(self): +# url = url = "https://api.github.com/orgs/{}".format(self.org) +# ## attempting to add key due to rate limiting +# gh_api_key = self.augur_config.get_value('Database', 'key') +# self.headers = {'Authorization': 'token %s' % gh_api_key} +# #r = requests.get(url=cntrb_url, headers=self.headers) # ####### Original request code # # res = requests.get(url).json() # ######## -# res = requests.get(url=url, headers=self.headers).json() -# try: +# res = requests.get(url=url, headers=self.headers).json() +# try: +# if res['message'] == "Not Found": +# url = url = 
"https://api.github.com/users/{}".format(self.org) +# res = requests.get(url=url, headers=self.headers).json() # if res['message'] == "Not Found": -# url = url = "https://api.github.com/users/{}".format(self.org) -# res = requests.get(url=url, headers=self.headers).json() -# if res['message'] == "Not Found": -# return False -# except KeyError: -# return True - -# def insert_repo(self, orgid, given_org, reponame): -# """creates a new repo record""" -# insert_repo_query = s.sql.text(""" -# INSERT INTO augur_data.repo(repo_group_id, repo_git, repo_status, -# tool_source, tool_version, data_source, data_collection_date) -# VALUES (:repo_group_id, :repo_git, 'New', 'CLI', 1.0, 'Git', CURRENT_TIMESTAMP) -# RETURNING repo_id -# """) -# repogit = self.github_urlify(given_org, reponame) -# insert_repo_query = insert_repo_query.bindparams(repo_group_id = int(orgid), repo_git = repogit) -# result = self.db.execute(insert_repo_query).fetchone() -# return result['repo_id'] - -# def github_urlify(self, org, repo): -# return "https://github.com/" + org + "/" + repo - -# def get_org_id(self): -# select_group_query = s.sql.text(""" -# SELECT repo_group_id -# FROM augur_data.repo_groups -# WHERE rg_name = :group_name -# """) -# select_group_query = select_group_query.bindparams(group_name = self.org) -# result = self.db.execute(select_group_query) -# row = result.fetchone() -# return row['repo_group_id'] - -# def insert_repo_group(self): -# """creates a new repo_group record and returns its id""" -# insert_group_query = s.sql.text(""" -# INSERT INTO augur_data.repo_groups(rg_name, rg_description, rg_website, rg_recache, rg_last_modified, rg_type, -# tool_source, tool_version, data_source, data_collection_date) -# VALUES (:group_name, '', '', 1, CURRENT_TIMESTAMP, 'Unknown', 'Loaded by user', 1.0, 'Git', CURRENT_TIMESTAMP) -# RETURNING repo_group_id -# """) -# insert_group_query = insert_group_query.bindparams(group_name = self.org) -# result = self.db.execute(insert_group_query) -# row = result.fetchone() -# return row['repo_group_id'] - -# def fetch_repos(self): -# """uses the github api to return repos belonging to the given organization""" -# gh_api_key = self.augur_config.get_value('Database', 'key') -# self.headers = {'Authorization': 'token %s' % gh_api_key} -# repos = [] -# page = 1 -# url = self.paginate(page) -# res = requests.get(url, headers=self.headers).json() -# while res: -# for repo in res: -# repos.append(repo['name']) -# page += 1 -# res = requests.get(self.paginate(page)).json() -# return repos +# return False +# except KeyError: +# return True + +# def insert_repo(self, orgid, given_org, reponame): +# """creates a new repo record""" +# insert_repo_query = s.sql.text(""" +# INSERT INTO augur_data.repo(repo_group_id, repo_git, repo_status, +# tool_source, tool_version, data_source, data_collection_date) +# VALUES (:repo_group_id, :repo_git, 'New', 'CLI', 1.0, 'Git', CURRENT_TIMESTAMP) +# RETURNING repo_id +# """) +# repogit = self.github_urlify(given_org, reponame) +# insert_repo_query = insert_repo_query.bindparams(repo_group_id = int(orgid), repo_git = repogit) +# result = self.db.execute(insert_repo_query).fetchone() +# return result['repo_id'] + +# def github_urlify(self, org, repo): +# return "https://github.com/" + org + "/" + repo + +# def get_org_id(self): +# select_group_query = s.sql.text(""" +# SELECT repo_group_id +# FROM augur_data.repo_groups +# WHERE rg_name = :group_name +# """) +# select_group_query = select_group_query.bindparams(group_name = self.org) +# result = 
self.db.execute(select_group_query) +# row = result.fetchone() +# return row['repo_group_id'] + +# def insert_repo_group(self): +# """creates a new repo_group record and returns its id""" +# insert_group_query = s.sql.text(""" +# INSERT INTO augur_data.repo_groups(rg_name, rg_description, rg_website, rg_recache, rg_last_modified, rg_type, +# tool_source, tool_version, data_source, data_collection_date) +# VALUES (:group_name, '', '', 1, CURRENT_TIMESTAMP, 'Unknown', 'Loaded by user', 1.0, 'Git', CURRENT_TIMESTAMP) +# RETURNING repo_group_id +# """) +# insert_group_query = insert_group_query.bindparams(group_name = self.org) +# result = self.db.execute(insert_group_query) +# row = result.fetchone() +# return row['repo_group_id'] + +# def fetch_repos(self): +# """uses the github api to return repos belonging to the given organization""" +# gh_api_key = self.augur_config.get_value('Database', 'key') +# self.headers = {'Authorization': 'token %s' % gh_api_key} +# repos = [] +# page = 1 +# url = self.paginate(page) +# res = requests.get(url, headers=self.headers).json() +# while res: +# for repo in res: +# repos.append(repo['name']) +# page += 1 +# res = requests.get(self.paginate(page)).json() +# return repos # ## Modified pagination to account for github orgs that look like orgs but are actually users. -# def paginate(self, page): +# def paginate(self, page): # ### Modified here to incorporate the use of a GitHub API Key -# gh_api_key = self.augur_config.get_value('Database', 'key') -# self.headers = {'Authorization': 'token %s' % gh_api_key} -# url = "https://api.github.com/orgs/{}/repos?per_page=100&page={}" -# res = requests.get(url, headers=self.headers).json() -# if res['message'] == "Not Found": -# url = "https://api.github.com/users/{}/repos?per_page=100&page={}" -# res = requests.get(url=url, headers=self.headers).json() -# return url.format(self.org, str(page)) +# gh_api_key = self.augur_config.get_value('Database', 'key') +# self.headers = {'Authorization': 'token %s' % gh_api_key} +# url = "https://api.github.com/orgs/{}/repos?per_page=100&page={}" +# res = requests.get(url, headers=self.headers).json() +# if res['message'] == "Not Found": +# url = "https://api.github.com/users/{}/repos?per_page=100&page={}" +# res = requests.get(url=url, headers=self.headers).json() +# return url.format(self.org, str(page)) -# #r = requests.get(url=cntrb_url, headers=self.headers) +# #r = requests.get(url=cntrb_url, headers=self.headers) # ####### Original request code # # res = requests.get(url).json() # ######## -# res = requests.get(url=url, headers=self.headers).json() +# res = requests.get(url=url, headers=self.headers).json() @@ -315,72 +314,72 @@ def create_routes(server): # # return url.format(self.org, str(page)) # class Git_string(): -# """ represents possible repo, org or username arguments """ -# def __init__(self, string_to_process): -# self.name = string_to_process - -# def clean_full_string(self): -# """remove trailing slash, protocol, and source if present""" -# org = self.name -# if org.endswith('/'): -# org = org[:-1] -# if org.startswith('https://'): -# org = org[8:] -# slash_index = org.find('/') -# org = org[slash_index+1:] -# if org.startswith('git://'): -# org = org[6:] -# slash_index = org.find('/') -# org = org[slash_index+1:] -# self.name = org - -# def is_repo(self): -# """test for org/repo or user/repo form""" -# slash_count = 0 -# for char in self.name: -# if char == '/': -# slash_count += 1 -# if slash_count == 1: -# return -# else: -# raise ValueError - -# def 
get_repo_organization(self): -# org = self.name -# return org[:org.find('/')] +# """ represents possible repo, org or username arguments """ +# def __init__(self, string_to_process): +# self.name = string_to_process + +# def clean_full_string(self): +# """remove trailing slash, protocol, and source if present""" +# org = self.name +# if org.endswith('/'): +# org = org[:-1] +# if org.startswith('https://'): +# org = org[8:] +# slash_index = org.find('/') +# org = org[slash_index+1:] +# if org.startswith('git://'): +# org = org[6:] +# slash_index = org.find('/') +# org = org[slash_index+1:] +# self.name = org + +# def is_repo(self): +# """test for org/repo or user/repo form""" +# slash_count = 0 +# for char in self.name: +# if char == '/': +# slash_count += 1 +# if slash_count == 1: +# return +# else: +# raise ValueError + +# def get_repo_organization(self): +# org = self.name +# return org[:org.find('/')] -# def get_repo_name(self): -# repo = self.name -# return repo[repo.find('/')+1:] +# def get_repo_name(self): +# repo = self.name +# return repo[repo.find('/')+1:] # def authenticate_request(augur_app, request): -# # do I like doing it like this? not at all -# # do I have the time to implement a better solution right now? not at all -# user = augur_app.config.get_value('Database', 'user') -# password = augur_app.config.get_value('Database', 'password') -# host = augur_app.config.get_value('Database', 'host') -# port = augur_app.config.get_value('Database', 'port') -# dbname = augur_app.config.get_value('Database', 'name') +# # do I like doing it like this? not at all +# # do I have the time to implement a better solution right now? not at all +# user = augur_app.config.get_value('Database', 'user') +# password = augur_app.config.get_value('Database', 'password') +# host = augur_app.config.get_value('Database', 'host') +# port = augur_app.config.get_value('Database', 'port') +# dbname = augur_app.config.get_value('Database', 'name') -# DB_STR = 'postgresql://{}:{}@{}:{}/{}'.format( -# user, password, host, port, dbname -# ) +# DB_STR = 'postgresql://{}:{}@{}:{}/{}'.format( +# user, password, host, port, dbname +# ) -# operations_db = s.create_engine(DB_STR, poolclass=s.pool.NullPool) +# operations_db = s.create_engine(DB_STR, poolclass=s.pool.NullPool) -# update_api_key_sql = s.sql.text(""" -# SELECT value FROM augur_operations.augur_settings WHERE setting='augur_api_key'; -# """) +# update_api_key_sql = s.sql.text(""" +# SELECT value FROM augur_operations.augur_settings WHERE setting='augur_api_key'; +# """) -# retrieved_api_key = operations_db.execute(update_api_key_sql).fetchone()[0] +# retrieved_api_key = operations_db.execute(update_api_key_sql).fetchone()[0] -# try: -# given_api_key = request.json['augur_api_key'] -# except KeyError: -# return False +# try: +# given_api_key = request.json['augur_api_key'] +# except KeyError: +# return False -# if given_api_key == retrieved_api_key and given_api_key != "invalid_key": -# return True -# else: -# return False +# if given_api_key == retrieved_api_key and given_api_key != "invalid_key": +# return True +# else: +# return False diff --git a/augur/api/routes/metadata.py b/augur/api/routes/metadata.py index 8d4cad3c5a..389a3d9d18 100644 --- a/augur/api/routes/metadata.py +++ b/augur/api/routes/metadata.py @@ -13,72 +13,71 @@ import requests from augur.api.routes import AUGUR_API_VERSION +from ..server import app, engine -def create_routes(server): +@app.route('/{}/metadata/repo_info'.format(AUGUR_API_VERSION), methods=["GET"]) +def 
get_repo_info(): + repo_info_sql = s.sql.text(""" + SELECT + repo.repo_git, + repo.repo_name, + repo.repo_id, + repo_info.default_branch, + repo_info.license, + repo_info.fork_count, + repo_info.watchers_count, + repo_info.stars_count, + repo_info.commit_count, + repo_info.committers_count, + repo_info.open_issues, + repo_info.issues_count, + repo_info.issues_closed, + repo_info.pull_request_count, + repo_info.pull_requests_open, + repo_info.pull_requests_closed, + repo_info.pull_requests_merged + FROM + repo_info, + repo, + ( SELECT repo_id, MAX ( data_collection_date ) AS last_collected FROM augur_data.repo_info GROUP BY repo_id ORDER BY repo_id ) e + WHERE + repo_info.repo_id = repo.repo_id + AND e.repo_id = repo_info.repo_id + AND e.last_collected = repo_info.data_collection_date + ORDER BY + repo.repo_name; + """) + results = pd.read_sql(repo_info_sql, engine) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + parsed_data = json.loads(data) + return Response(response=data, + status=200, + mimetype="application/json") - @server.app.route('/{}/metadata/repo_info'.format(AUGUR_API_VERSION), methods=["GET"]) - def get_repo_info(): - repo_info_sql = s.sql.text(""" - SELECT - repo.repo_git, - repo.repo_name, - repo.repo_id, - repo_info.default_branch, - repo_info.license, - repo_info.fork_count, - repo_info.watchers_count, - repo_info.stars_count, - repo_info.commit_count, - repo_info.committers_count, - repo_info.open_issues, - repo_info.issues_count, - repo_info.issues_closed, - repo_info.pull_request_count, - repo_info.pull_requests_open, - repo_info.pull_requests_closed, - repo_info.pull_requests_merged - FROM - repo_info, - repo, - ( SELECT repo_id, MAX ( data_collection_date ) AS last_collected FROM augur_data.repo_info GROUP BY repo_id ORDER BY repo_id ) e - WHERE - repo_info.repo_id = repo.repo_id - AND e.repo_id = repo_info.repo_id - AND e.last_collected = repo_info.data_collection_date - ORDER BY - repo.repo_name; - """) - results = pd.read_sql(repo_info_sql, server.engine) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - parsed_data = json.loads(data) - return Response(response=data, - status=200, - mimetype="application/json") +@app.route('/{}/metadata/contributions_count'.format(AUGUR_API_VERSION), methods=["GET"]) +def contributions_count(): + repo_info_sql = s.sql.text(""" + select repo_git, count(*) as contributions from contributor_repo + group by repo_git + order by contributions desc; + """) + results = pd.read_sql(repo_info_sql, engine) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + parsed_data = json.loads(data) + return Response(response=data, + status=200, + mimetype="application/json") - @server.app.route('/{}/metadata/contributions_count'.format(AUGUR_API_VERSION), methods=["GET"]) - def contributions_count(): - repo_info_sql = s.sql.text(""" - select repo_git, count(*) as contributions from contributor_repo - group by repo_git - order by contributions desc; - """) - results = pd.read_sql(repo_info_sql, server.engine) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - parsed_data = json.loads(data) - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/metadata/contributors_count'.format(AUGUR_API_VERSION), methods=["GET"]) - def contributors_count(): - repo_info_sql = s.sql.text(""" - select repo_git, count(distinct(cntrb_id)) as contributors from contributor_repo - group by repo_git - order by 
contributors desc; - """) - results = pd.read_sql(repo_info_sql, server.engine) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - parsed_data = json.loads(data) - return Response(response=data, - status=200, - mimetype="application/json") +@app.route('/{}/metadata/contributors_count'.format(AUGUR_API_VERSION), methods=["GET"]) +def contributors_count(): + repo_info_sql = s.sql.text(""" + select repo_git, count(distinct(cntrb_id)) as contributors from contributor_repo + group by repo_git + order by contributors desc; + """) + results = pd.read_sql(repo_info_sql, engine) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + parsed_data = json.loads(data) + return Response(response=data, + status=200, + mimetype="application/json") diff --git a/augur/api/routes/nonstandard_metrics.py b/augur/api/routes/nonstandard_metrics.py index 71ac2ff13a..b100ab303b 100644 --- a/augur/api/routes/nonstandard_metrics.py +++ b/augur/api/routes/nonstandard_metrics.py @@ -8,24 +8,21 @@ from augur.api.metrics.repo_meta import license_files from augur.api.metrics.insight import top_insights -# from augur.api.server import transform -from augur.api.server import server - from augur.api.routes import AUGUR_API_VERSION +from ..server import app, route_transform -def create_routes(server): - @server.app.route(f"/{AUGUR_API_VERSION}/////license-files") - def get_license_files(license_id, spdx_binary, repo_group_id, repo_id): - arguments = [license_id, spdx_binary, repo_group_id, repo_id] - license_files = server.transform(license_files, args=arguments) - return Response(response=license_files, - status=200, - mimetype="application/json") +@app.route(f"/{AUGUR_API_VERSION}/////license-files") +def get_license_files(license_id, spdx_binary, repo_group_id, repo_id): + arguments = [license_id, spdx_binary, repo_group_id, repo_id] + license_files = route_transform(license_files, args=arguments) + return Response(response=license_files, + status=200, + mimetype="application/json") - @server.app.route(f"/{AUGUR_API_VERSION}/repo-groups//top-insights") - def top_insights(repo_group_id): - data = server.transform(top_insights, args=[repo_group_id]) - return Response(response=data, - status=200, - mimetype="application/json") \ No newline at end of file +@app.route(f"/{AUGUR_API_VERSION}/repo-groups//top-insights") +def top_insights(repo_group_id): + data = route_transform(top_insights, args=[repo_group_id]) + return Response(response=data, + status=200, + mimetype="application/json") \ No newline at end of file diff --git a/augur/api/routes/pull_request_reports.py b/augur/api/routes/pull_request_reports.py index b130e403a2..c5e936af5e 100644 --- a/augur/api/routes/pull_request_reports.py +++ b/augur/api/routes/pull_request_reports.py @@ -24,1893 +24,1893 @@ warnings.filterwarnings('ignore') from augur.api.routes import AUGUR_API_VERSION - -def create_routes(server): - def pull_request_data_collection(repo_id, start_date, end_date): - - pr_query = salc.sql.text(f""" - SELECT - repo.repo_id AS repo_id, - pull_requests.pr_src_id AS pr_src_id, - repo.repo_name AS repo_name, - pr_src_author_association, - repo_groups.rg_name AS repo_group, - pull_requests.pr_src_state, - pull_requests.pr_merged_at, - pull_requests.pr_created_at AS pr_created_at, - pull_requests.pr_closed_at AS pr_closed_at, - date_part( 'year', pr_created_at :: DATE ) AS CREATED_YEAR, - date_part( 'month', pr_created_at :: DATE ) AS CREATED_MONTH, - date_part( 'year', pr_closed_at :: DATE ) AS CLOSED_YEAR, - 
date_part( 'month', pr_closed_at :: DATE ) AS CLOSED_MONTH, - pr_src_meta_label, - pr_head_or_base, - ( EXTRACT ( EPOCH FROM pull_requests.pr_closed_at ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 3600 AS hours_to_close, - ( EXTRACT ( EPOCH FROM pull_requests.pr_closed_at ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 86400 AS days_to_close, - ( EXTRACT ( EPOCH FROM first_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 3600 AS hours_to_first_response, - ( EXTRACT ( EPOCH FROM first_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 86400 AS days_to_first_response, - ( EXTRACT ( EPOCH FROM last_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 3600 AS hours_to_last_response, - ( EXTRACT ( EPOCH FROM last_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 86400 AS days_to_last_response, - first_response_time, - last_response_time, - average_time_between_responses, - assigned_count, - review_requested_count, - labeled_count, - subscribed_count, - mentioned_count, - referenced_count, - closed_count, - head_ref_force_pushed_count, - merged_count, - milestoned_count, - unlabeled_count, - head_ref_deleted_count, - comment_count, - lines_added, - lines_removed, - commit_count, - file_count - FROM - repo, - repo_groups, - pull_requests LEFT OUTER JOIN ( - SELECT pull_requests.pull_request_id, - count(*) FILTER (WHERE action = 'assigned') AS assigned_count, - count(*) FILTER (WHERE action = 'review_requested') AS review_requested_count, - count(*) FILTER (WHERE action = 'labeled') AS labeled_count, - count(*) FILTER (WHERE action = 'unlabeled') AS unlabeled_count, - count(*) FILTER (WHERE action = 'subscribed') AS subscribed_count, - count(*) FILTER (WHERE action = 'mentioned') AS mentioned_count, - count(*) FILTER (WHERE action = 'referenced') AS referenced_count, - count(*) FILTER (WHERE action = 'closed') AS closed_count, - count(*) FILTER (WHERE action = 'head_ref_force_pushed') AS head_ref_force_pushed_count, - count(*) FILTER (WHERE action = 'head_ref_deleted') AS head_ref_deleted_count, - count(*) FILTER (WHERE action = 'milestoned') AS milestoned_count, - count(*) FILTER (WHERE action = 'merged') AS merged_count, - MIN(message.msg_timestamp) AS first_response_time, - COUNT(DISTINCT message.msg_timestamp) AS comment_count, - MAX(message.msg_timestamp) AS last_response_time, - (MAX(message.msg_timestamp) - MIN(message.msg_timestamp)) / COUNT(DISTINCT message.msg_timestamp) AS average_time_between_responses - FROM pull_request_events, pull_requests, repo, pull_request_message_ref, message - WHERE repo.repo_id = {repo_id} - AND repo.repo_id = pull_requests.repo_id - AND pull_requests.pull_request_id = pull_request_events.pull_request_id - AND pull_requests.pull_request_id = pull_request_message_ref.pull_request_id - AND pull_request_message_ref.msg_id = message.msg_id - GROUP BY pull_requests.pull_request_id - ) response_times - ON pull_requests.pull_request_id = response_times.pull_request_id - LEFT OUTER JOIN ( - SELECT pull_request_commits.pull_request_id, count(DISTINCT pr_cmt_sha) AS commit_count FROM pull_request_commits, pull_requests, pull_request_meta - WHERE pull_requests.pull_request_id = pull_request_commits.pull_request_id - AND pull_requests.pull_request_id = pull_request_meta.pull_request_id - AND pull_requests.repo_id = {repo_id} - AND pr_cmt_sha <> pull_requests.pr_merge_commit_sha - AND pr_cmt_sha <> pull_request_meta.pr_sha - GROUP BY 
pull_request_commits.pull_request_id - ) all_commit_counts - ON pull_requests.pull_request_id = all_commit_counts.pull_request_id - LEFT OUTER JOIN ( - SELECT MAX(pr_repo_meta_id), pull_request_meta.pull_request_id, pr_head_or_base, pr_src_meta_label - FROM pull_requests, pull_request_meta - WHERE pull_requests.pull_request_id = pull_request_meta.pull_request_id - AND pull_requests.repo_id = {repo_id} - AND pr_head_or_base = 'base' - GROUP BY pull_request_meta.pull_request_id, pr_head_or_base, pr_src_meta_label - ) base_labels - ON base_labels.pull_request_id = all_commit_counts.pull_request_id - LEFT OUTER JOIN ( - SELECT sum(cmt_added) AS lines_added, sum(cmt_removed) AS lines_removed, pull_request_commits.pull_request_id, count(DISTINCT cmt_filename) AS file_count - FROM pull_request_commits, commits, pull_requests, pull_request_meta - WHERE cmt_commit_hash = pr_cmt_sha - AND pull_requests.pull_request_id = pull_request_commits.pull_request_id - AND pull_requests.pull_request_id = pull_request_meta.pull_request_id - AND pull_requests.repo_id = {repo_id} - AND commits.repo_id = pull_requests.repo_id - AND commits.cmt_commit_hash <> pull_requests.pr_merge_commit_sha - AND commits.cmt_commit_hash <> pull_request_meta.pr_sha - GROUP BY pull_request_commits.pull_request_id - ) master_merged_counts - ON base_labels.pull_request_id = master_merged_counts.pull_request_id - WHERE - repo.repo_group_id = repo_groups.repo_group_id - AND repo.repo_id = pull_requests.repo_id - AND repo.repo_id = {repo_id} - ORDER BY - merged_count DESC - """) - pr_all = pd.read_sql(pr_query, server.engine) - - pr_all[['assigned_count', - 'review_requested_count', - 'labeled_count', - 'subscribed_count', - 'mentioned_count', - 'referenced_count', - 'closed_count', - 'head_ref_force_pushed_count', - 'merged_count', - 'milestoned_count', - 'unlabeled_count', - 'head_ref_deleted_count', - 'comment_count', - 'commit_count', - 'file_count', - 'lines_added', - 'lines_removed' - ]] = pr_all[['assigned_count', - 'review_requested_count', - 'labeled_count', - 'subscribed_count', - 'mentioned_count', - 'referenced_count', - 'closed_count', - 'head_ref_force_pushed_count', - 'merged_count', - 'milestoned_count', - 'unlabeled_count', - 'head_ref_deleted_count', - 'comment_count', - 'commit_count', - 'file_count', - 'lines_added', - 'lines_removed' - ]].astype(float) - # Change years to int so that doesn't display as 2019.0 for example - pr_all[['created_year', 'closed_year']] = pr_all[['created_year', 'closed_year']].fillna(-1).astype(int).astype( - str) - - start_date = pd.to_datetime(start_date) - # end_date = pd.to_datetime('2020-02-01 09:00:00') - end_date = pd.to_datetime(end_date) - pr_all = pr_all[(pr_all['pr_created_at'] > start_date) & (pr_all['pr_closed_at'] < end_date)] - - pr_all['created_year'] = pr_all['created_year'].map(int) - pr_all['created_month'] = pr_all['created_month'].map(int) - pr_all['created_month'] = pr_all['created_month'].map(lambda x: '{0:0>2}'.format(x)) - pr_all['created_yearmonth'] = pd.to_datetime( - pr_all['created_year'].map(str) + '-' + pr_all['created_month'].map(str) + '-01') - - # getting the number of days of (today - created at) for the PRs that are still open - # and putting this in the days_to_close column - - # get timedeltas of creation time to todays date/time - days_to_close_open_pr = datetime.datetime.now() - pr_all.loc[pr_all['pr_src_state'] == 'open']['pr_created_at'] - - # get num days from above timedelta - days_to_close_open_pr = days_to_close_open_pr.apply(lambda x: 
x.days).astype(int) - - # for only OPEN pr's, set the days_to_close column equal to above dataframe - pr_all.loc[pr_all['pr_src_state'] == 'open'] = pr_all.loc[pr_all['pr_src_state'] == 'open'].assign( - days_to_close=days_to_close_open_pr) - - pr_all.loc[pr_all['pr_src_state'] == 'open'].head() - - # initiate column by setting all null datetimes - pr_all['closed_yearmonth'] = pd.to_datetime(np.nan) - - # Fill column with prettified string of year/month closed that looks like: 2019-07-01 - pr_all.loc[pr_all['pr_src_state'] == 'closed'] = pr_all.loc[pr_all['pr_src_state'] == 'closed'].assign( - closed_yearmonth=pd.to_datetime(pr_all.loc[pr_all['pr_src_state'] == 'closed']['closed_year'].astype(int - ).map( - str) + '-' + pr_all.loc[pr_all['pr_src_state'] == 'closed']['closed_month'].astype(int).map( - str) + '-01')) - - """ Merged flag """ - if 'pr_merged_at' in pr_all.columns.values: - pr_all['pr_merged_at'] = pr_all['pr_merged_at'].fillna(0) - pr_all['merged_flag'] = 'Not Merged / Rejected' - pr_all['merged_flag'].loc[pr_all['pr_merged_at'] != 0] = 'Merged / Accepted' - pr_all['merged_flag'].loc[pr_all['pr_src_state'] == 'open'] = 'Still Open' - del pr_all['pr_merged_at'] - - # Isolate the different state PRs for now - pr_open = pr_all.loc[pr_all['pr_src_state'] == 'open'] - pr_closed = pr_all.loc[pr_all['pr_src_state'] == 'closed'] - pr_merged = pr_all.loc[pr_all['merged_flag'] == 'Merged / Accepted'] - pr_not_merged = pr_all.loc[pr_all['merged_flag'] == 'Not Merged / Rejected'] - - # Filtering the 80th percentile slowest PRs - def filter_20_per_slowest(input_df): - pr_slow20_filtered = pd.DataFrame() - pr_slow20_x = pd.DataFrame() - pr_slow20_filtered = input_df.copy() - pr_slow20_filtered['percentile_rank_local'] = pr_slow20_filtered.days_to_close.rank(pct=True) - pr_slow20_filtered = pr_slow20_filtered.query('percentile_rank_local >= .8', ) - - return pr_slow20_filtered - - pr_slow20_open = filter_20_per_slowest(pr_open) - pr_slow20_closed = filter_20_per_slowest(pr_closed) - pr_slow20_merged = filter_20_per_slowest(pr_merged) - pr_slow20_not_merged = filter_20_per_slowest(pr_not_merged) - pr_slow20_all = filter_20_per_slowest(pr_all) - - return pr_all, pr_open, pr_closed, pr_merged, pr_not_merged, pr_slow20_all, pr_slow20_open, pr_slow20_closed, pr_slow20_merged, pr_slow20_not_merged - - def remove_outliers(input_df, field, num_outliers_repo_map): - df_no_outliers = input_df.copy() - for repo_name, num_outliers in num_outliers_repo_map.items(): - indices_to_drop = input_df.loc[input_df['repo_name'] == repo_name].nlargest(num_outliers, field).index - df_no_outliers = df_no_outliers.drop(index=indices_to_drop) - return df_no_outliers - - def remove_outliers_by_standard_deviation(input_df, column): - '''Takes a dataframe and a numeric column name. - Then removes all rows thare are than 3 standard deviations from the mean. 
- Returns a df without outliers, the # of outliers removed, outlier cutoff value''' - - # finds rows that are more than 3 standard deviations from the mean - outlier_cutoff = input_df[column].mean() + (3 * input_df[column].std()) - outlier_mask = input_df[column] > outlier_cutoff - - # determine number of outliers - outliers_removed = len(input_df.loc[outlier_mask]) - - df_no_outliers = input_df.loc[~outlier_mask] - - return df_no_outliers, outliers_removed, outlier_cutoff - - def hex_to_RGB(hex): - ''' "#FFFFFF" -> [255,255,255] ''' - # Pass 16 to the integer function for change of base - return [int(hex[i:i + 2], 16) for i in range(1, 6, 2)] - - def color_dict(gradient): - ''' Takes in a list of RGB sub-lists and returns dictionary of - colors in RGB and hex form for use in a graphing function - defined later on ''' - return {"hex": [RGB_to_hex(RGB) for RGB in gradient], - "r": [RGB[0] for RGB in gradient], - "g": [RGB[1] for RGB in gradient], - "b": [RGB[2] for RGB in gradient]} - - def RGB_to_hex(RGB): - ''' [255,255,255] -> "#FFFFFF" ''' - # Components need to be integers for hex to make sense - RGB = [int(x) for x in RGB] - return "#" + "".join(["0{0:x}".format(v) if v < 16 else - "{0:x}".format(v) for v in RGB]) - - def linear_gradient(start_hex, finish_hex="#FFFFFF", n=10): - ''' returns a gradient list of (n) colors between - two hex colors. start_hex and finish_hex - should be the full six-digit color string, - inlcuding the number sign ("#FFFFFF") ''' - # Starting and ending colors in RGB form - s = hex_to_RGB(start_hex) - f = hex_to_RGB(finish_hex) - # Initilize a list of the output colors with the starting color - RGB_list = [s] - # Calcuate a color at each evenly spaced value of t from 1 to n - for t in range(1, n): - # Interpolate RGB vector for color at the current value of t - curr_vector = [ - int(s[j] + (float(t) / (n - 1)) * (f[j] - s[j])) - for j in range(3) - ] - # Add it to our list of output colors - RGB_list.append(curr_vector) - - return color_dict(RGB_list) - - # dict of df types, and their locaiton in the tuple that the function pull_request_data_collection returns - def get_df_tuple_locations(): - return {"pr_all": 0, "pr_open": 1, "pr_closed": 2, "pr_merged": 3, "pr_not_merged": 4, "pr_slow20_all": 5, - "pr_slow20_open": 6, "pr_slow20_closed": 7, "pr_slow20_merged": 8, "pr_slow20_not_merged": 9} - - def add_caption_to_plot(caption_plot, caption): - - caption_plot.add_layout(Label( - x=0, # Change to shift caption left or right - y=160, - x_units='screen', - y_units='screen', - text='{}'.format(caption), - text_font='times', # Use same font as paper - text_font_size='15pt', - render_mode='css' - )) - caption_plot.outline_line_color = None - - return caption_plot - - def remove_rows_with_null_values(df, not_null_columns=[]): - """Remove null data from pandas df - - Parameters - -- df - description: the dataframe that will be modified - type: Pandas Dataframe - - -- list_of_columns - description: columns that are searched for NULL values - type: list - default: [] (means all columns will be checked for NULL values) - IMPORTANT: if an empty list is passed or nothing is passed it will check all columns for NULL values - - Return Value - -- Modified Pandas Dataframe - """ - - if len(not_null_columns) == 0: - not_null_columns = df.columns.to_list() - - total_rows_removed = 0 - for col in not_null_columns: - rows_removed = len(df.loc[df[col].isnull()]) - #rows_removed = len(df.loc[df[col].isnull() is True]) - - if rows_removed > 0: - print(f"{rows_removed} rows have 
been removed because of null values in column {col}") - total_rows_removed += rows_removed - - df = df.loc[df[col].isnull() is False] - - if total_rows_removed > 0: - print(f"\nTotal rows removed because of null data: {total_rows_removed}"); - else: - print("No null data found") +from ..server import app, engine + +def pull_request_data_collection(repo_id, start_date, end_date): + + pr_query = salc.sql.text(f""" + SELECT + repo.repo_id AS repo_id, + pull_requests.pr_src_id AS pr_src_id, + repo.repo_name AS repo_name, + pr_src_author_association, + repo_groups.rg_name AS repo_group, + pull_requests.pr_src_state, + pull_requests.pr_merged_at, + pull_requests.pr_created_at AS pr_created_at, + pull_requests.pr_closed_at AS pr_closed_at, + date_part( 'year', pr_created_at :: DATE ) AS CREATED_YEAR, + date_part( 'month', pr_created_at :: DATE ) AS CREATED_MONTH, + date_part( 'year', pr_closed_at :: DATE ) AS CLOSED_YEAR, + date_part( 'month', pr_closed_at :: DATE ) AS CLOSED_MONTH, + pr_src_meta_label, + pr_head_or_base, + ( EXTRACT ( EPOCH FROM pull_requests.pr_closed_at ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 3600 AS hours_to_close, + ( EXTRACT ( EPOCH FROM pull_requests.pr_closed_at ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 86400 AS days_to_close, + ( EXTRACT ( EPOCH FROM first_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 3600 AS hours_to_first_response, + ( EXTRACT ( EPOCH FROM first_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 86400 AS days_to_first_response, + ( EXTRACT ( EPOCH FROM last_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 3600 AS hours_to_last_response, + ( EXTRACT ( EPOCH FROM last_response_time ) - EXTRACT ( EPOCH FROM pull_requests.pr_created_at ) ) / 86400 AS days_to_last_response, + first_response_time, + last_response_time, + average_time_between_responses, + assigned_count, + review_requested_count, + labeled_count, + subscribed_count, + mentioned_count, + referenced_count, + closed_count, + head_ref_force_pushed_count, + merged_count, + milestoned_count, + unlabeled_count, + head_ref_deleted_count, + comment_count, + lines_added, + lines_removed, + commit_count, + file_count + FROM + repo, + repo_groups, + pull_requests LEFT OUTER JOIN ( + SELECT pull_requests.pull_request_id, + count(*) FILTER (WHERE action = 'assigned') AS assigned_count, + count(*) FILTER (WHERE action = 'review_requested') AS review_requested_count, + count(*) FILTER (WHERE action = 'labeled') AS labeled_count, + count(*) FILTER (WHERE action = 'unlabeled') AS unlabeled_count, + count(*) FILTER (WHERE action = 'subscribed') AS subscribed_count, + count(*) FILTER (WHERE action = 'mentioned') AS mentioned_count, + count(*) FILTER (WHERE action = 'referenced') AS referenced_count, + count(*) FILTER (WHERE action = 'closed') AS closed_count, + count(*) FILTER (WHERE action = 'head_ref_force_pushed') AS head_ref_force_pushed_count, + count(*) FILTER (WHERE action = 'head_ref_deleted') AS head_ref_deleted_count, + count(*) FILTER (WHERE action = 'milestoned') AS milestoned_count, + count(*) FILTER (WHERE action = 'merged') AS merged_count, + MIN(message.msg_timestamp) AS first_response_time, + COUNT(DISTINCT message.msg_timestamp) AS comment_count, + MAX(message.msg_timestamp) AS last_response_time, + (MAX(message.msg_timestamp) - MIN(message.msg_timestamp)) / COUNT(DISTINCT message.msg_timestamp) AS average_time_between_responses + FROM pull_request_events, pull_requests, repo, 
pull_request_message_ref, message + WHERE repo.repo_id = {repo_id} + AND repo.repo_id = pull_requests.repo_id + AND pull_requests.pull_request_id = pull_request_events.pull_request_id + AND pull_requests.pull_request_id = pull_request_message_ref.pull_request_id + AND pull_request_message_ref.msg_id = message.msg_id + GROUP BY pull_requests.pull_request_id + ) response_times + ON pull_requests.pull_request_id = response_times.pull_request_id + LEFT OUTER JOIN ( + SELECT pull_request_commits.pull_request_id, count(DISTINCT pr_cmt_sha) AS commit_count FROM pull_request_commits, pull_requests, pull_request_meta + WHERE pull_requests.pull_request_id = pull_request_commits.pull_request_id + AND pull_requests.pull_request_id = pull_request_meta.pull_request_id + AND pull_requests.repo_id = {repo_id} + AND pr_cmt_sha <> pull_requests.pr_merge_commit_sha + AND pr_cmt_sha <> pull_request_meta.pr_sha + GROUP BY pull_request_commits.pull_request_id + ) all_commit_counts + ON pull_requests.pull_request_id = all_commit_counts.pull_request_id + LEFT OUTER JOIN ( + SELECT MAX(pr_repo_meta_id), pull_request_meta.pull_request_id, pr_head_or_base, pr_src_meta_label + FROM pull_requests, pull_request_meta + WHERE pull_requests.pull_request_id = pull_request_meta.pull_request_id + AND pull_requests.repo_id = {repo_id} + AND pr_head_or_base = 'base' + GROUP BY pull_request_meta.pull_request_id, pr_head_or_base, pr_src_meta_label + ) base_labels + ON base_labels.pull_request_id = all_commit_counts.pull_request_id + LEFT OUTER JOIN ( + SELECT sum(cmt_added) AS lines_added, sum(cmt_removed) AS lines_removed, pull_request_commits.pull_request_id, count(DISTINCT cmt_filename) AS file_count + FROM pull_request_commits, commits, pull_requests, pull_request_meta + WHERE cmt_commit_hash = pr_cmt_sha + AND pull_requests.pull_request_id = pull_request_commits.pull_request_id + AND pull_requests.pull_request_id = pull_request_meta.pull_request_id + AND pull_requests.repo_id = {repo_id} + AND commits.repo_id = pull_requests.repo_id + AND commits.cmt_commit_hash <> pull_requests.pr_merge_commit_sha + AND commits.cmt_commit_hash <> pull_request_meta.pr_sha + GROUP BY pull_request_commits.pull_request_id + ) master_merged_counts + ON base_labels.pull_request_id = master_merged_counts.pull_request_id + WHERE + repo.repo_group_id = repo_groups.repo_group_id + AND repo.repo_id = pull_requests.repo_id + AND repo.repo_id = {repo_id} + ORDER BY + merged_count DESC + """) + pr_all = pd.read_sql(pr_query, engine) + + pr_all[['assigned_count', + 'review_requested_count', + 'labeled_count', + 'subscribed_count', + 'mentioned_count', + 'referenced_count', + 'closed_count', + 'head_ref_force_pushed_count', + 'merged_count', + 'milestoned_count', + 'unlabeled_count', + 'head_ref_deleted_count', + 'comment_count', + 'commit_count', + 'file_count', + 'lines_added', + 'lines_removed' + ]] = pr_all[['assigned_count', + 'review_requested_count', + 'labeled_count', + 'subscribed_count', + 'mentioned_count', + 'referenced_count', + 'closed_count', + 'head_ref_force_pushed_count', + 'merged_count', + 'milestoned_count', + 'unlabeled_count', + 'head_ref_deleted_count', + 'comment_count', + 'commit_count', + 'file_count', + 'lines_added', + 'lines_removed' + ]].astype(float) + # Change years to int so that doesn't display as 2019.0 for example + pr_all[['created_year', 'closed_year']] = pr_all[['created_year', 'closed_year']].fillna(-1).astype(int).astype( + str) + + start_date = pd.to_datetime(start_date) + # end_date = pd.to_datetime('2020-02-01 
09:00:00') + end_date = pd.to_datetime(end_date) + pr_all = pr_all[(pr_all['pr_created_at'] > start_date) & (pr_all['pr_closed_at'] < end_date)] + + pr_all['created_year'] = pr_all['created_year'].map(int) + pr_all['created_month'] = pr_all['created_month'].map(int) + pr_all['created_month'] = pr_all['created_month'].map(lambda x: '{0:0>2}'.format(x)) + pr_all['created_yearmonth'] = pd.to_datetime( + pr_all['created_year'].map(str) + '-' + pr_all['created_month'].map(str) + '-01') + + # getting the number of days of (today - created at) for the PRs that are still open + # and putting this in the days_to_close column + + # get timedeltas of creation time to todays date/time + days_to_close_open_pr = datetime.datetime.now() - pr_all.loc[pr_all['pr_src_state'] == 'open']['pr_created_at'] + + # get num days from above timedelta + days_to_close_open_pr = days_to_close_open_pr.apply(lambda x: x.days).astype(int) + + # for only OPEN pr's, set the days_to_close column equal to above dataframe + pr_all.loc[pr_all['pr_src_state'] == 'open'] = pr_all.loc[pr_all['pr_src_state'] == 'open'].assign( + days_to_close=days_to_close_open_pr) + + pr_all.loc[pr_all['pr_src_state'] == 'open'].head() + + # initiate column by setting all null datetimes + pr_all['closed_yearmonth'] = pd.to_datetime(np.nan) + + # Fill column with prettified string of year/month closed that looks like: 2019-07-01 + pr_all.loc[pr_all['pr_src_state'] == 'closed'] = pr_all.loc[pr_all['pr_src_state'] == 'closed'].assign( + closed_yearmonth=pd.to_datetime(pr_all.loc[pr_all['pr_src_state'] == 'closed']['closed_year'].astype(int + ).map( + str) + '-' + pr_all.loc[pr_all['pr_src_state'] == 'closed']['closed_month'].astype(int).map( + str) + '-01')) + + """ Merged flag """ + if 'pr_merged_at' in pr_all.columns.values: + pr_all['pr_merged_at'] = pr_all['pr_merged_at'].fillna(0) + pr_all['merged_flag'] = 'Not Merged / Rejected' + pr_all['merged_flag'].loc[pr_all['pr_merged_at'] != 0] = 'Merged / Accepted' + pr_all['merged_flag'].loc[pr_all['pr_src_state'] == 'open'] = 'Still Open' + del pr_all['pr_merged_at'] + + # Isolate the different state PRs for now + pr_open = pr_all.loc[pr_all['pr_src_state'] == 'open'] + pr_closed = pr_all.loc[pr_all['pr_src_state'] == 'closed'] + pr_merged = pr_all.loc[pr_all['merged_flag'] == 'Merged / Accepted'] + pr_not_merged = pr_all.loc[pr_all['merged_flag'] == 'Not Merged / Rejected'] + + # Filtering the 80th percentile slowest PRs + def filter_20_per_slowest(input_df): + pr_slow20_filtered = pd.DataFrame() + pr_slow20_x = pd.DataFrame() + pr_slow20_filtered = input_df.copy() + pr_slow20_filtered['percentile_rank_local'] = pr_slow20_filtered.days_to_close.rank(pct=True) + pr_slow20_filtered = pr_slow20_filtered.query('percentile_rank_local >= .8', ) + + return pr_slow20_filtered + + pr_slow20_open = filter_20_per_slowest(pr_open) + pr_slow20_closed = filter_20_per_slowest(pr_closed) + pr_slow20_merged = filter_20_per_slowest(pr_merged) + pr_slow20_not_merged = filter_20_per_slowest(pr_not_merged) + pr_slow20_all = filter_20_per_slowest(pr_all) + + return pr_all, pr_open, pr_closed, pr_merged, pr_not_merged, pr_slow20_all, pr_slow20_open, pr_slow20_closed, pr_slow20_merged, pr_slow20_not_merged + +def remove_outliers(input_df, field, num_outliers_repo_map): + df_no_outliers = input_df.copy() + for repo_name, num_outliers in num_outliers_repo_map.items(): + indices_to_drop = input_df.loc[input_df['repo_name'] == repo_name].nlargest(num_outliers, field).index + df_no_outliers = 
df_no_outliers.drop(index=indices_to_drop) + return df_no_outliers + +def remove_outliers_by_standard_deviation(input_df, column): + '''Takes a dataframe and a numeric column name. + Then removes all rows thare are than 3 standard deviations from the mean. + Returns a df without outliers, the # of outliers removed, outlier cutoff value''' + + # finds rows that are more than 3 standard deviations from the mean + outlier_cutoff = input_df[column].mean() + (3 * input_df[column].std()) + outlier_mask = input_df[column] > outlier_cutoff + + # determine number of outliers + outliers_removed = len(input_df.loc[outlier_mask]) + + df_no_outliers = input_df.loc[~outlier_mask] + + return df_no_outliers, outliers_removed, outlier_cutoff + +def hex_to_RGB(hex): + ''' "#FFFFFF" -> [255,255,255] ''' + # Pass 16 to the integer function for change of base + return [int(hex[i:i + 2], 16) for i in range(1, 6, 2)] + +def color_dict(gradient): + ''' Takes in a list of RGB sub-lists and returns dictionary of + colors in RGB and hex form for use in a graphing function + defined later on ''' + return {"hex": [RGB_to_hex(RGB) for RGB in gradient], + "r": [RGB[0] for RGB in gradient], + "g": [RGB[1] for RGB in gradient], + "b": [RGB[2] for RGB in gradient]} + +def RGB_to_hex(RGB): + ''' [255,255,255] -> "#FFFFFF" ''' + # Components need to be integers for hex to make sense + RGB = [int(x) for x in RGB] + return "#" + "".join(["0{0:x}".format(v) if v < 16 else + "{0:x}".format(v) for v in RGB]) + +def linear_gradient(start_hex, finish_hex="#FFFFFF", n=10): + ''' returns a gradient list of (n) colors between + two hex colors. start_hex and finish_hex + should be the full six-digit color string, + inlcuding the number sign ("#FFFFFF") ''' + # Starting and ending colors in RGB form + s = hex_to_RGB(start_hex) + f = hex_to_RGB(finish_hex) + # Initilize a list of the output colors with the starting color + RGB_list = [s] + # Calcuate a color at each evenly spaced value of t from 1 to n + for t in range(1, n): + # Interpolate RGB vector for color at the current value of t + curr_vector = [ + int(s[j] + (float(t) / (n - 1)) * (f[j] - s[j])) + for j in range(3) + ] + # Add it to our list of output colors + RGB_list.append(curr_vector) + + return color_dict(RGB_list) + +# dict of df types, and their locaiton in the tuple that the function pull_request_data_collection returns +def get_df_tuple_locations(): + return {"pr_all": 0, "pr_open": 1, "pr_closed": 2, "pr_merged": 3, "pr_not_merged": 4, "pr_slow20_all": 5, + "pr_slow20_open": 6, "pr_slow20_closed": 7, "pr_slow20_merged": 8, "pr_slow20_not_merged": 9} + +def add_caption_to_plot(caption_plot, caption): + + caption_plot.add_layout(Label( + x=0, # Change to shift caption left or right + y=160, + x_units='screen', + y_units='screen', + text='{}'.format(caption), + text_font='times', # Use same font as paper + text_font_size='15pt', + render_mode='css' + )) + caption_plot.outline_line_color = None + + return caption_plot + +def remove_rows_with_null_values(df, not_null_columns=[]): + """Remove null data from pandas df + + Parameters + -- df + description: the dataframe that will be modified + type: Pandas Dataframe + + -- list_of_columns + description: columns that are searched for NULL values + type: list + default: [] (means all columns will be checked for NULL values) + IMPORTANT: if an empty list is passed or nothing is passed it will check all columns for NULL values + + Return Value + -- Modified Pandas Dataframe + """ + + if len(not_null_columns) == 0: + 
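[Editorial note, not part of the patch] The color helpers relocated to module level in this hunk (hex_to_RGB, RGB_to_hex, color_dict, linear_gradient) implement a simple evenly spaced RGB interpolation. A minimal, self-contained sketch of that math, assuming the definitions shown above, with its exact output:

```python
# Illustrative sketch only: the interpolation performed by linear_gradient()
# before color_dict() converts each RGB triple back to hex.
def _interp(start, finish, n):
    # evenly spaced RGB vectors from start to finish, n colors total
    out = [start]
    for t in range(1, n):
        out.append([int(start[j] + (float(t) / (n - 1)) * (finish[j] - start[j]))
                    for j in range(3)])
    return out

print(_interp([0, 0, 0], [255, 255, 255], 3))
# -> [[0, 0, 0], [127, 127, 127], [255, 255, 255]]
# color_dict() would report these as "#000000", "#7f7f7f", "#ffffff"
```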
not_null_columns = df.columns.to_list() + + total_rows_removed = 0 + for col in not_null_columns: + rows_removed = len(df.loc[df[col].isnull()]) + #rows_removed = len(df.loc[df[col].isnull() is True]) + + if rows_removed > 0: + print(f"{rows_removed} rows have been removed because of null values in column {col}") + total_rows_removed += rows_removed + + df = df.loc[df[col].isnull() is False] + + if total_rows_removed > 0: + print(f"\nTotal rows removed because of null data: {total_rows_removed}"); + else: + print("No null data found") + + return df + +def get_needed_columns(df, list_of_columns): + """Get only a specific list of columns from a Pandas Dataframe + + Parameters + -- df + description: the dataframe that will be modified + type: Pandas Dataframe + + -- list_of_columns + description: columns that will be kept in dataframe + type: list + + Return Value + -- Modified Pandas Dataframe + """ + return df[list_of_columns] + +def filter_data(df, needed_columns, not_null_columns=[]): + """Filters out the unneeded rows in the df, and removed NULL data from df + + Parameters + -- df + description: the dataframe that will be modified + type: Pandas Dataframe + + -- needed_columns + description: the columns to keep in the dataframe + + -- not_null_columns + description: columns that will be searched for NULL data, + if NULL values are found those rows will be removed + default: [] (means all columns in needed_columns list will be checked for NULL values) + IMPORTANT: if an empty list is passed or nothing is passed it will check + all columns in needed_columns list for NULL values + Return Value + -- Modified Pandas Dataframe + """ + + if all(x in needed_columns for x in not_null_columns): + + df = get_needed_columns(df, needed_columns) + #Use the pandas method bc the other method was erroring on boolean index. + #IM - 9/23/22 + df = df.dropna(subset=not_null_columns)#remove_rows_with_null_values(df, not_null_columns) return df + else: + print("Developer error, not null columns should be a subset of needed columns") + return df - def get_needed_columns(df, list_of_columns): - """Get only a specific list of columns from a Pandas Dataframe +def get_repo_id_start_date_and_end_date(): - Parameters - -- df - description: the dataframe that will be modified - type: Pandas Dataframe + """ Gets the repo_id, start_date, and end_date from the GET requests array - -- list_of_columns - description: columns that will be kept in dataframe - type: list + :return: repo_id - id of the repo data is being retrieved for + :return: start_date - earliest time on visualization. Defaults to the January 1st of last year + :return: end_date - latest time on visualization. 
Defaults to current date + """ - Return Value - -- Modified Pandas Dataframe - """ - return df[list_of_columns] + now = datetime.datetime.now() - def filter_data(df, needed_columns, not_null_columns=[]): - """Filters out the unneeded rows in the df, and removed NULL data from df + repo_id = request.args.get('repo_id') + start_date = str(request.args.get('start_date', "{}-01-01".format(now.year - 1))) + end_date = str(request.args.get('end_date', "{}-{}-{}".format(now.year, now.month, now.day))) - Parameters - -- df - description: the dataframe that will be modified - type: Pandas Dataframe + if repo_id: - -- needed_columns - description: the columns to keep in the dataframe + if start_date < end_date: + return int(repo_id), start_date, end_date, None + else: - -- not_null_columns - description: columns that will be searched for NULL data, - if NULL values are found those rows will be removed - default: [] (means all columns in needed_columns list will be checked for NULL values) - IMPORTANT: if an empty list is passed or nothing is passed it will check - all columns in needed_columns list for NULL values - Return Value - -- Modified Pandas Dataframe - """ + error = { + "message": "Invalid end_date. end_date is before the start_date", + "status_code": 400 + } - if all(x in needed_columns for x in not_null_columns): + return int(repo_id), None, None, error - df = get_needed_columns(df, needed_columns) - #Use the pandas method bc the other method was erroring on boolean index. - #IM - 9/23/22 - df = df.dropna(subset=not_null_columns)#remove_rows_with_null_values(df, not_null_columns) + else: + error = { + "message": "repo_id not specified. Use this endpoint to get a list of available repos: http:///api/unstable/repos", + "status_code": 400 + } + return None, None, None, error - return df - else: - print("Developer error, not null columns should be a subset of needed columns") - return df +@app.route('/{}/pull_request_reports/average_commits_per_PR/'.format(AUGUR_API_VERSION), methods=["GET"]) +def average_commits_per_PR(): - def get_repo_id_start_date_and_end_date(): + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - """ Gets the repo_id, start_date, and end_date from the GET requests array + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - :return: repo_id - id of the repo data is being retrieved for - :return: start_date - earliest time on visualization. Defaults to the January 1st of last year - :return: end_date - latest time on visualization. 
Defaults to current date - """ + group_by = str(request.args.get('group_by', "month")) + return_json = request.args.get('return_json', "false") - now = datetime.datetime.now() + df_type = get_df_tuple_locations() - repo_id = request.args.get('repo_id') - start_date = str(request.args.get('start_date', "{}-01-01".format(now.year - 1))) - end_date = str(request.args.get('end_date', "{}-{}-{}".format(now.year, now.month, now.day))) + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - if repo_id: + y_axis = 'num_commits' + group_by_bars = 'merged_flag' + description = 'All' - if start_date < end_date: - return int(repo_id), start_date, end_date, None - else: + # gets pr_all data + # selects only need columns (pr_closed_needed_columns) + # removes columns that cannot be NULL (pr_closed_not_null_columns) + input_df = df_tuple[df_type["pr_all"]] + needed_columns = ['repo_id', 'repo_name', 'closed_year', 'closed_yearmonth', group_by_bars, 'commit_count'] + input_df = filter_data(input_df, needed_columns) - error = { - "message": "Invalid end_date. end_date is before the start_date", - "status_code": 400 - } + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - return int(repo_id), None, None, error + # print(input_df.to_string()) - else: - error = { - "message": "repo_id not specified. Use this endpoint to get a list of available repos: http:///api/unstable/repos", - "status_code": 400 - } - return None, None, None, error + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} - @server.app.route('/{}/pull_request_reports/average_commits_per_PR/'.format(AUGUR_API_VERSION), methods=["GET"]) - def average_commits_per_PR(): + driver_df = input_df.copy() # deep copy input data so we do not change the external dataframe - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + # Change closed year to int so that doesn't display as 2019.0 for example + driver_df['closed_year'] = driver_df['closed_year'].astype(int).astype(str) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) + # defaults to year + x_axis = 'closed_year' + x_groups = sorted(list(driver_df[x_axis].unique())) - group_by = str(request.args.get('group_by', "month")) - return_json = request.args.get('return_json', "false") + if group_by == 'month': + x_axis = "closed_yearmonth" + x_groups = np.unique(np.datetime_as_string(input_df[x_axis], unit='M')) - df_type = get_df_tuple_locations() + # inner groups on x_axis they are merged and not_merged + groups = list(driver_df[group_by_bars].unique()) - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + # setup color pallete + try: + colors = mpl['Plasma'][len(groups)] + except: + colors = [mpl['Plasma'][3][0]] + [mpl['Plasma'][3][1]] - y_axis = 'num_commits' - group_by_bars = 'merged_flag' - description = 'All' + merged_avg_values = list(driver_df.loc[driver_df[group_by_bars] == 'Merged / Accepted'].groupby([x_axis], + as_index=False).mean().round( + 1)['commit_count']) + not_merged_avg_values = list( + driver_df.loc[driver_df[group_by_bars] == 'Not Merged / Rejected'].groupby([x_axis], + as_index=False).mean().round(1)[ + 'commit_count']) - # gets pr_all data - # selects only need columns (pr_closed_needed_columns) - # removes columns that cannot be NULL 
(pr_closed_not_null_columns) - input_df = df_tuple[df_type["pr_all"]] - needed_columns = ['repo_id', 'repo_name', 'closed_year', 'closed_yearmonth', group_by_bars, 'commit_count'] - input_df = filter_data(input_df, needed_columns) + # Setup data in format for grouped bar chart + data = { + 'years': x_groups, + 'Merged / Accepted': merged_avg_values, + 'Not Merged / Rejected': not_merged_avg_values, + } - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + x = [(year, pr_state) for year in x_groups for pr_state in groups] + counts = sum(zip(data['Merged / Accepted'], data['Not Merged / Rejected']), ()) - # print(input_df.to_string()) + source = ColumnDataSource(data=dict(x=x, counts=counts)) - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + title_beginning = '{}: '.format(repo_dict[repo_id]) + title = "{}Average Commit Counts Per Year for {} Pull Requests".format(title_beginning, description) - driver_df = input_df.copy() # deep copy input data so we do not change the external dataframe + plot_width = len(x_groups) * 300 + title_text_font_size = 16 - # Change closed year to int so that doesn't display as 2019.0 for example - driver_df['closed_year'] = driver_df['closed_year'].astype(int).astype(str) + if (len(title) * title_text_font_size / 2) > plot_width: + plot_width = int(len(title) * title_text_font_size / 2) + 40 - # defaults to year - x_axis = 'closed_year' - x_groups = sorted(list(driver_df[x_axis].unique())) - - if group_by == 'month': - x_axis = "closed_yearmonth" - x_groups = np.unique(np.datetime_as_string(input_df[x_axis], unit='M')) - - # inner groups on x_axis they are merged and not_merged - groups = list(driver_df[group_by_bars].unique()) - - # setup color pallete - try: - colors = mpl['Plasma'][len(groups)] - except: - colors = [mpl['Plasma'][3][0]] + [mpl['Plasma'][3][1]] - - merged_avg_values = list(driver_df.loc[driver_df[group_by_bars] == 'Merged / Accepted'].groupby([x_axis], - as_index=False).mean().round( - 1)['commit_count']) - not_merged_avg_values = list( - driver_df.loc[driver_df[group_by_bars] == 'Not Merged / Rejected'].groupby([x_axis], - as_index=False).mean().round(1)[ - 'commit_count']) - - # Setup data in format for grouped bar chart - data = { - 'years': x_groups, - 'Merged / Accepted': merged_avg_values, - 'Not Merged / Rejected': not_merged_avg_values, - } + p = figure(x_range=FactorRange(*x), plot_height=450, plot_width=plot_width, title=title, + y_range=(0, max(merged_avg_values + not_merged_avg_values) * 1.15), toolbar_location=None) - x = [(year, pr_state) for year in x_groups for pr_state in groups] - counts = sum(zip(data['Merged / Accepted'], data['Not Merged / Rejected']), ()) + # Vertical bar glyph + p.vbar(x='x', top='counts', width=0.9, source=source, line_color="white", + fill_color=factor_cmap('x', palette=colors, factors=groups, start=1, end=2)) - source = ColumnDataSource(data=dict(x=x, counts=counts)) + # Data label + labels = LabelSet(x='x', y='counts', text='counts', # y_offset=-8, x_offset=34, + text_font_size="12pt", text_color="black", + source=source, text_align='center') + p.add_layout(labels) - title_beginning = '{}: '.format(repo_dict[repo_id]) - title = "{}Average Commit Counts Per Year for {} Pull Requests".format(title_beginning, description) + p.y_range.start = 0 + p.x_range.range_padding = 0.1 + p.xaxis.major_label_orientation = 1 + p.xgrid.grid_line_color = None - 
plot_width = len(x_groups) * 300 - title_text_font_size = 16 + p.yaxis.axis_label = 'Average Commits / Pull Request' + p.xaxis.axis_label = 'Year Closed' - if (len(title) * title_text_font_size / 2) > plot_width: - plot_width = int(len(title) * title_text_font_size / 2) + 40 + p.title.align = "center" + p.title.text_font_size = "{}px".format(title_text_font_size) - p = figure(x_range=FactorRange(*x), plot_height=450, plot_width=plot_width, title=title, - y_range=(0, max(merged_avg_values + not_merged_avg_values) * 1.15), toolbar_location=None) + p.xaxis.axis_label_text_font_size = "16px" + p.xaxis.major_label_text_font_size = "15px" - # Vertical bar glyph - p.vbar(x='x', top='counts', width=0.9, source=source, line_color="white", - fill_color=factor_cmap('x', palette=colors, factors=groups, start=1, end=2)) + p.yaxis.axis_label_text_font_size = "15px" + p.yaxis.major_label_text_font_size = "15px" - # Data label - labels = LabelSet(x='x', y='counts', text='counts', # y_offset=-8, x_offset=34, - text_font_size="12pt", text_color="black", - source=source, text_align='center') - p.add_layout(labels) + plot = p - p.y_range.start = 0 - p.x_range.range_padding = 0.1 - p.xaxis.major_label_orientation = 1 - p.xgrid.grid_line_color = None + p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the average commits per pull requests over an entire year," \ + " for merged and not merged pull requests." + p = add_caption_to_plot(p, caption) - p.yaxis.axis_label = 'Average Commits / Pull Request' - p.xaxis.axis_label = 'Year Closed' + caption_plot = p - p.title.align = "center" - p.title.text_font_size = "{}px".format(title_text_font_size) + grid = gridplot([[plot], [caption_plot]]) - p.xaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "15px" + if return_json == "true": + var = Response(response=json.dumps(json_item(grid, "average_commits_per_PR")), + mimetype='application/json', + status=200) - p.yaxis.axis_label_text_font_size = "15px" - p.yaxis.major_label_text_font_size = "15px" + var.headers["Access-Control-Allow-Orgin"] = "*" - plot = p + return var - p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the average commits per pull requests over an entire year," \ - " for merged and not merged pull requests." 
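[Editorial note, not part of the patch] After this refactor the report endpoints are registered directly on the shared Flask app, so a client calls them the same way as before; the query parameters mirror get_repo_id_start_date_and_end_date() and the route body above. A hedged usage sketch follows: host, port, and repo_id are placeholders, and the path prefix assumes AUGUR_API_VERSION resolves to "api/unstable" as elsewhere in this codebase.

```python
# Illustrative client call only; values below are placeholders.
import requests

resp = requests.get(
    "http://localhost:5000/api/unstable/pull_request_reports/average_commits_per_PR/",
    params={
        "repo_id": 1,                # placeholder repo_id
        "start_date": "2022-01-01",
        "end_date": "2022-12-31",
        "group_by": "month",
        "return_json": "true",       # "false" (the default) returns an exported PNG instead
    },
)
chart = resp.json()  # Bokeh json_item payload when return_json is "true"
```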
- p = add_caption_to_plot(p, caption) + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + filename = export_png(grid, timeout=180) - caption_plot = p + return send_file(filename) - grid = gridplot([[plot], [caption_plot]]) +@app.route('/{}/pull_request_reports/average_comments_per_PR/'.format(AUGUR_API_VERSION), methods=["GET"]) +def average_comments_per_PR(): - if return_json == "true": - var = Response(response=json.dumps(json_item(grid, "average_commits_per_PR")), - mimetype='application/json', - status=200) + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - var.headers["Access-Control-Allow-Orgin"] = "*" + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - return var + return_json = request.args.get('return_json', "false") - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - filename = export_png(grid, timeout=180) + df_type = get_df_tuple_locations() - return send_file(filename) + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - @server.app.route('/{}/pull_request_reports/average_comments_per_PR/'.format(AUGUR_API_VERSION), methods=["GET"]) - def average_comments_per_PR(): + group_by = 'merged_flag' + x_axis = 'comment_count' + description = "All Closed" + y_axis = 'closed_year' - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + # gets pr_closed data + # selects only need columns (pr_closed_needed_columns) + # removes columns that cannot be NULL (pr_closed_not_null_columns) + input_df = df_tuple[df_type["pr_closed"]] + needed_columns = ['repo_id', 'repo_name', y_axis, group_by, x_axis] + not_null_columns = needed_columns + input_df = filter_data(input_df, needed_columns) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - return_json = request.args.get('return_json', "false") + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} - df_type = get_df_tuple_locations() + driver_df = input_df.copy() - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + try: + y_groups = sorted(list(driver_df[y_axis].unique())) + except: + y_groups = [repo_id] - group_by = 'merged_flag' - x_axis = 'comment_count' - description = "All Closed" - y_axis = 'closed_year' + groups = driver_df[group_by].unique() + try: + colors = mpl['Plasma'][len(groups)] + except: + colors = [mpl['Plasma'][3][0]] + [mpl['Plasma'][3][1]] - # gets pr_closed data - # selects only need columns (pr_closed_needed_columns) - # removes columns that cannot be NULL (pr_closed_not_null_columns) - input_df = df_tuple[df_type["pr_closed"]] - needed_columns = ['repo_id', 'repo_name', y_axis, group_by, x_axis] - not_null_columns = needed_columns - input_df = filter_data(input_df, needed_columns) + len_not_merged = len(driver_df.loc[driver_df['merged_flag'] == 'Not Merged / Rejected']) + len_merged = len(driver_df.loc[driver_df['merged_flag'] == 'Merged / Accepted']) - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - 
status=200) + title_beginning = '{}: '.format(repo_dict[repo_id]) + plot_width = 650 + p = figure(y_range=y_groups, plot_height=450, plot_width=plot_width, + # y_range=y_groups,#(pr_all[y_axis].min(),pr_all[y_axis].max()) #y_axis_type="datetime", + title='{} {}'.format(title_beginning, "Mean Comments for {} Pull Requests".format(description)), + toolbar_location=None) - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + possible_maximums = [] + for y_value in y_groups: - driver_df = input_df.copy() + y_merged_data = driver_df.loc[ + (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Merged / Accepted')] + y_not_merged_data = driver_df.loc[ + (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Not Merged / Rejected')] - try: - y_groups = sorted(list(driver_df[y_axis].unique())) - except: - y_groups = [repo_id] - - groups = driver_df[group_by].unique() - try: - colors = mpl['Plasma'][len(groups)] - except: - colors = [mpl['Plasma'][3][0]] + [mpl['Plasma'][3][1]] - - len_not_merged = len(driver_df.loc[driver_df['merged_flag'] == 'Not Merged / Rejected']) - len_merged = len(driver_df.loc[driver_df['merged_flag'] == 'Merged / Accepted']) - - title_beginning = '{}: '.format(repo_dict[repo_id]) - plot_width = 650 - p = figure(y_range=y_groups, plot_height=450, plot_width=plot_width, - # y_range=y_groups,#(pr_all[y_axis].min(),pr_all[y_axis].max()) #y_axis_type="datetime", - title='{} {}'.format(title_beginning, "Mean Comments for {} Pull Requests".format(description)), - toolbar_location=None) - - possible_maximums = [] - for y_value in y_groups: - - y_merged_data = driver_df.loc[ - (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Merged / Accepted')] - y_not_merged_data = driver_df.loc[ - (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Not Merged / Rejected')] - - if len(y_merged_data) > 0: - y_merged_data_mean = y_merged_data[x_axis].mean() - - if (math.isnan(y_merged_data_mean)): - return Response( - response="There is no message data for this repo, in the database you are accessing", - mimetype='application/json', status=200) - else: - y_merged_data[x_axis + '_mean'] = y_merged_data_mean.round(1) + if len(y_merged_data) > 0: + y_merged_data_mean = y_merged_data[x_axis].mean() + if (math.isnan(y_merged_data_mean)): + return Response( + response="There is no message data for this repo, in the database you are accessing", + mimetype='application/json', status=200) else: - y_merged_data[x_axis + '_mean'] = 0 + y_merged_data[x_axis + '_mean'] = y_merged_data_mean.round(1) - if len(y_not_merged_data) > 0: - y_not_merged_data_mean = y_not_merged_data[x_axis].mean() + else: + y_merged_data[x_axis + '_mean'] = 0 - if math.isnan(y_not_merged_data_mean): - return Response( - response="There is no message data for this repo, in the database you are accessing", - mimetype='application/json', status=200) - else: - y_not_merged_data[x_axis + '_mean'] = y_not_merged_data_mean.round(1) + if len(y_not_merged_data) > 0: + y_not_merged_data_mean = y_not_merged_data[x_axis].mean() + if math.isnan(y_not_merged_data_mean): + return Response( + response="There is no message data for this repo, in the database you are accessing", + mimetype='application/json', status=200) else: - y_not_merged_data[x_axis + '_mean'] = 0 + y_not_merged_data[x_axis + '_mean'] = y_not_merged_data_mean.round(1) + + else: + y_not_merged_data[x_axis + '_mean'] = 0 - not_merged_source = ColumnDataSource(y_not_merged_data) - merged_source = 
ColumnDataSource(y_merged_data) + not_merged_source = ColumnDataSource(y_not_merged_data) + merged_source = ColumnDataSource(y_merged_data) - possible_maximums.append(max(y_not_merged_data[x_axis + '_mean'])) - possible_maximums.append(max(y_merged_data[x_axis + '_mean'])) + possible_maximums.append(max(y_not_merged_data[x_axis + '_mean'])) + possible_maximums.append(max(y_merged_data[x_axis + '_mean'])) - # mean comment count for merged - merged_comment_count_glyph = p.hbar(y=dodge(y_axis, -0.1, range=p.y_range), left=0, right=x_axis + '_mean', + # mean comment count for merged + merged_comment_count_glyph = p.hbar(y=dodge(y_axis, -0.1, range=p.y_range), left=0, right=x_axis + '_mean', + height=0.04 * len(driver_df[y_axis].unique()), + source=merged_source, + fill_color="black") # ,legend_label="Mean Days to Close", + # Data label + labels = LabelSet(x=x_axis + '_mean', y=dodge(y_axis, -0.1, range=p.y_range), text=x_axis + '_mean', + y_offset=-8, x_offset=34, + text_font_size="12pt", text_color="black", + source=merged_source, text_align='center') + p.add_layout(labels) + # mean comment count For nonmerged + not_merged_comment_count_glyph = p.hbar(y=dodge(y_axis, 0.1, range=p.y_range), left=0, + right=x_axis + '_mean', height=0.04 * len(driver_df[y_axis].unique()), - source=merged_source, - fill_color="black") # ,legend_label="Mean Days to Close", - # Data label - labels = LabelSet(x=x_axis + '_mean', y=dodge(y_axis, -0.1, range=p.y_range), text=x_axis + '_mean', - y_offset=-8, x_offset=34, - text_font_size="12pt", text_color="black", - source=merged_source, text_align='center') - p.add_layout(labels) - # mean comment count For nonmerged - not_merged_comment_count_glyph = p.hbar(y=dodge(y_axis, 0.1, range=p.y_range), left=0, - right=x_axis + '_mean', - height=0.04 * len(driver_df[y_axis].unique()), - source=not_merged_source, - fill_color="#e84d60") # legend_label="Mean Days to Close", - # Data label - labels = LabelSet(x=x_axis + '_mean', y=dodge(y_axis, 0.1, range=p.y_range), text=x_axis + '_mean', - y_offset=-8, x_offset=34, - text_font_size="12pt", text_color="#e84d60", - source=not_merged_source, text_align='center') - p.add_layout(labels) + source=not_merged_source, + fill_color="#e84d60") # legend_label="Mean Days to Close", + # Data label + labels = LabelSet(x=x_axis + '_mean', y=dodge(y_axis, 0.1, range=p.y_range), text=x_axis + '_mean', + y_offset=-8, x_offset=34, + text_font_size="12pt", text_color="#e84d60", + source=not_merged_source, text_align='center') + p.add_layout(labels) - # p.y_range.range_padding = 0.1 - p.ygrid.grid_line_color = None - p.legend.location = "bottom_right" - p.axis.minor_tick_line_color = None - p.outline_line_color = None - p.xaxis.axis_label = 'Average Comments / Pull Request' - p.yaxis.axis_label = 'Repository' if y_axis == 'repo_name' else 'Year Closed' if y_axis == 'closed_year' else '' + # p.y_range.range_padding = 0.1 + p.ygrid.grid_line_color = None + p.legend.location = "bottom_right" + p.axis.minor_tick_line_color = None + p.outline_line_color = None + p.xaxis.axis_label = 'Average Comments / Pull Request' + p.yaxis.axis_label = 'Repository' if y_axis == 'repo_name' else 'Year Closed' if y_axis == 'closed_year' else '' - legend = Legend( - items=[ - ("Merged Pull Request Mean Comment Count", [merged_comment_count_glyph]), - ("Rejected Pull Request Mean Comment Count", [not_merged_comment_count_glyph]) - ], + legend = Legend( + items=[ + ("Merged Pull Request Mean Comment Count", [merged_comment_count_glyph]), + ("Rejected Pull Request Mean 
Comment Count", [not_merged_comment_count_glyph]) + ], - location='center', - orientation='vertical', - border_line_color="black" - ) - p.add_layout(legend, "below") + location='center', + orientation='vertical', + border_line_color="black" + ) + p.add_layout(legend, "below") - p.title.text_font_size = "16px" - p.title.align = "center" + p.title.text_font_size = "16px" + p.title.align = "center" - p.xaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "16px" + p.xaxis.axis_label_text_font_size = "16px" + p.xaxis.major_label_text_font_size = "16px" - p.yaxis.axis_label_text_font_size = "16px" - p.yaxis.major_label_text_font_size = "16px" + p.yaxis.axis_label_text_font_size = "16px" + p.yaxis.major_label_text_font_size = "16px" - p.x_range = Range1d(0, max(possible_maximums) * 1.15) + p.x_range = Range1d(0, max(possible_maximums) * 1.15) - plot = p + plot = p - p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the average number of comments per merged or not merged pull request." + p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the average number of comments per merged or not merged pull request." - p = add_caption_to_plot(p, caption) + p = add_caption_to_plot(p, caption) - caption_plot = p + caption_plot = p - grid = gridplot([[plot], [caption_plot]]) + grid = gridplot([[plot], [caption_plot]]) - if return_json == "true": - var = Response(response=json.dumps(json_item(grid, "average_comments_per_PR")), - mimetype='application/json', - status=200) + if return_json == "true": + var = Response(response=json.dumps(json_item(grid, "average_comments_per_PR")), + mimetype='application/json', + status=200) - var.headers["Access-Control-Allow-Orgin"] = "*" + var.headers["Access-Control-Allow-Orgin"] = "*" - return var + return var - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - filename = export_png(grid, timeout=180) + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + filename = export_png(grid, timeout=180) - return send_file(filename) + return send_file(filename) - @server.app.route('/{}/pull_request_reports/PR_counts_by_merged_status/'.format(AUGUR_API_VERSION), - methods=["GET"]) - def PR_counts_by_merged_status(): +@app.route('/{}/pull_request_reports/PR_counts_by_merged_status/'.format(AUGUR_API_VERSION), + methods=["GET"]) +def PR_counts_by_merged_status(): - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - return_json = request.args.get('return_json', "false") + return_json = request.args.get('return_json', "false") - x_axis = 'closed_year' - description = 'All Closed' + x_axis = 'closed_year' + description = 'All Closed' - df_type = get_df_tuple_locations() + df_type = get_df_tuple_locations() - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - # gets pr_closed data - # selects only need columns (pr_closed_needed_columns) - # removes columns that cannot 
be NULL (pr_closed_not_null_columns) - pr_closed = df_tuple[df_type["pr_closed"]] - pr_closed_needed_columns = ['repo_id', 'repo_name', x_axis, 'merged_flag'] - pr_closed = filter_data(pr_closed, pr_closed_needed_columns) + # gets pr_closed data + # selects only need columns (pr_closed_needed_columns) + # removes columns that cannot be NULL (pr_closed_not_null_columns) + pr_closed = df_tuple[df_type["pr_closed"]] + pr_closed_needed_columns = ['repo_id', 'repo_name', x_axis, 'merged_flag'] + pr_closed = filter_data(pr_closed, pr_closed_needed_columns) - # gets pr_slow20_not_merged data - # selects only need columns (pr_slow20_not_merged_needed_columns) - # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) - pr_slow20_not_merged = df_tuple[df_type["pr_slow20_not_merged"]] - pr_slow20_not_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'merged_flag'] - pr_slow20_not_merged = filter_data(pr_slow20_not_merged, pr_slow20_not_merged_needed_columns,) + # gets pr_slow20_not_merged data + # selects only need columns (pr_slow20_not_merged_needed_columns) + # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) + pr_slow20_not_merged = df_tuple[df_type["pr_slow20_not_merged"]] + pr_slow20_not_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'merged_flag'] + pr_slow20_not_merged = filter_data(pr_slow20_not_merged, pr_slow20_not_merged_needed_columns,) - # gets pr_slow20_merged data - # selects only need columns (pr_slow20_not_merged_needed_columns) - # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) - pr_slow20_merged = df_tuple[df_type["pr_slow20_merged"]] - pr_slow20_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'merged_flag'] - pr_slow20_merged = filter_data(pr_slow20_merged, pr_slow20_merged_needed_columns) + # gets pr_slow20_merged data + # selects only need columns (pr_slow20_not_merged_needed_columns) + # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) + pr_slow20_merged = df_tuple[df_type["pr_slow20_merged"]] + pr_slow20_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'merged_flag'] + pr_slow20_merged = filter_data(pr_slow20_merged, pr_slow20_merged_needed_columns) - if len(pr_closed) == 0 or len(pr_slow20_not_merged) == 0 or len(pr_slow20_merged) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + if len(pr_closed) == 0 or len(pr_slow20_not_merged) == 0 or len(pr_slow20_merged) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} + repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} - data_dict = {'All': pr_closed, 'Slowest 20%': pr_slow20_not_merged.append(pr_slow20_merged, ignore_index=True)} + data_dict = {'All': pr_closed, 'Slowest 20%': pr_slow20_not_merged.append(pr_slow20_merged, ignore_index=True)} - colors = mpl['Plasma'][6] + colors = mpl['Plasma'][6] - for data_desc, input_df in data_dict.items(): - x_groups = sorted(list(input_df[x_axis].astype(str).unique())) - break + for data_desc, input_df in data_dict.items(): + x_groups = sorted(list(input_df[x_axis].astype(str).unique())) + break - plot_width = 315 * len(x_groups) + plot_width = 315 * len(x_groups) - if plot_width < 900: - plot_width = 900 - title_beginning = 
repo_dict[repo_id] - p = figure(x_range=x_groups, plot_height=350, plot_width=plot_width, - title='{}: {}'.format(title_beginning, - "Count of {} Pull Requests by Merged Status".format(description)), - toolbar_location=None) + if plot_width < 900: + plot_width = 900 + title_beginning = repo_dict[repo_id] + p = figure(x_range=x_groups, plot_height=350, plot_width=plot_width, + title='{}: {}'.format(title_beginning, + "Count of {} Pull Requests by Merged Status".format(description)), + toolbar_location=None) - dodge_amount = 0.12 - color_index = 0 - x_offset = 60 + dodge_amount = 0.12 + color_index = 0 + x_offset = 60 - all_totals = [] - for data_desc, input_df in data_dict.items(): - driver_df = input_df.copy() + all_totals = [] + for data_desc, input_df in data_dict.items(): + driver_df = input_df.copy() - driver_df[x_axis] = driver_df[x_axis].astype(str) + driver_df[x_axis] = driver_df[x_axis].astype(str) - groups = sorted(list(driver_df['merged_flag'].unique())) + groups = sorted(list(driver_df['merged_flag'].unique())) - driver_df = driver_df.loc[driver_df['repo_id'] == repo_id] + driver_df = driver_df.loc[driver_df['repo_id'] == repo_id] - len_merged = [] - zeros = [] - len_not_merged = [] - totals = [] + len_merged = [] + zeros = [] + len_not_merged = [] + totals = [] + for x_group in x_groups: + len_merged_entry = len( + driver_df.loc[(driver_df['merged_flag'] == 'Merged / Accepted') & (driver_df[x_axis] == x_group)]) + totals += [len(driver_df.loc[(driver_df['merged_flag'] == 'Not Merged / Rejected') & ( + driver_df[x_axis] == x_group)]) + len_merged_entry] + len_not_merged += [len(driver_df.loc[(driver_df['merged_flag'] == 'Not Merged / Rejected') & ( + driver_df[x_axis] == x_group)])] + len_merged += [len_merged_entry] + zeros.append(0) + + data = {'X': x_groups} + for group in groups: + data[group] = [] for x_group in x_groups: - len_merged_entry = len( - driver_df.loc[(driver_df['merged_flag'] == 'Merged / Accepted') & (driver_df[x_axis] == x_group)]) - totals += [len(driver_df.loc[(driver_df['merged_flag'] == 'Not Merged / Rejected') & ( - driver_df[x_axis] == x_group)]) + len_merged_entry] - len_not_merged += [len(driver_df.loc[(driver_df['merged_flag'] == 'Not Merged / Rejected') & ( - driver_df[x_axis] == x_group)])] - len_merged += [len_merged_entry] - zeros.append(0) - - data = {'X': x_groups} - for group in groups: - data[group] = [] - for x_group in x_groups: - data[group] += [ - len(driver_df.loc[(driver_df['merged_flag'] == group) & (driver_df[x_axis] == x_group)])] - - data['len_merged'] = len_merged - data['len_not_merged'] = len_not_merged - data['totals'] = totals - data['zeros'] = zeros - - if data_desc == "All": - all_totals = totals - - source = ColumnDataSource(data) - - stacked_bar = p.vbar_stack(groups, x=dodge('X', dodge_amount, range=p.x_range), width=0.2, source=source, - color=colors[1:3], legend_label=[f"{data_desc} " + "%s" % x for x in groups]) - # Data label for merged - - p.add_layout( - LabelSet(x=dodge('X', dodge_amount, range=p.x_range), y='zeros', text='len_merged', y_offset=2, - x_offset=x_offset, - text_font_size="12pt", text_color=colors[1:3][0], - source=source, text_align='center') - ) - if min(data['totals']) < 400: - y_offset = 15 - else: - y_offset = 0 - # Data label for not merged - p.add_layout( - LabelSet(x=dodge('X', dodge_amount, range=p.x_range), y='totals', text='len_not_merged', - y_offset=y_offset, x_offset=x_offset, - text_font_size="12pt", text_color=colors[1:3][1], - source=source, text_align='center') - ) - # Data label for 
total - p.add_layout( - LabelSet(x=dodge('X', dodge_amount, range=p.x_range), y='totals', text='totals', y_offset=0, x_offset=0, - text_font_size="12pt", text_color='black', - source=source, text_align='center') - ) - dodge_amount *= -1 - colors = colors[::-1] - x_offset *= -1 - - p.y_range = Range1d(0, max(all_totals) * 1.4) - - p.xgrid.grid_line_color = None - p.legend.location = "top_center" - p.legend.orientation = "horizontal" - p.axis.minor_tick_line_color = None - p.outline_line_color = None - p.yaxis.axis_label = 'Count of Pull Requests' - p.xaxis.axis_label = 'Repository' if x_axis == 'repo_name' else 'Year Closed' if x_axis == 'closed_year' else '' + data[group] += [ + len(driver_df.loc[(driver_df['merged_flag'] == group) & (driver_df[x_axis] == x_group)])] - p.title.align = "center" - p.title.text_font_size = "16px" + data['len_merged'] = len_merged + data['len_not_merged'] = len_not_merged + data['totals'] = totals + data['zeros'] = zeros - p.xaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "16px" + if data_desc == "All": + all_totals = totals - p.yaxis.axis_label_text_font_size = "16px" - p.yaxis.major_label_text_font_size = "16px" + source = ColumnDataSource(data) - p.outline_line_color = None + stacked_bar = p.vbar_stack(groups, x=dodge('X', dodge_amount, range=p.x_range), width=0.2, source=source, + color=colors[1:3], legend_label=[f"{data_desc} " + "%s" % x for x in groups]) + # Data label for merged - plot = p + p.add_layout( + LabelSet(x=dodge('X', dodge_amount, range=p.x_range), y='zeros', text='len_merged', y_offset=2, + x_offset=x_offset, + text_font_size="12pt", text_color=colors[1:3][0], + source=source, text_align='center') + ) + if min(data['totals']) < 400: + y_offset = 15 + else: + y_offset = 0 + # Data label for not merged + p.add_layout( + LabelSet(x=dodge('X', dodge_amount, range=p.x_range), y='totals', text='len_not_merged', + y_offset=y_offset, x_offset=x_offset, + text_font_size="12pt", text_color=colors[1:3][1], + source=source, text_align='center') + ) + # Data label for total + p.add_layout( + LabelSet(x=dodge('X', dodge_amount, range=p.x_range), y='totals', text='totals', y_offset=0, x_offset=0, + text_font_size="12pt", text_color='black', + source=source, text_align='center') + ) + dodge_amount *= -1 + colors = colors[::-1] + x_offset *= -1 - p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the number of closed pull requests per year in " \ - "four different categories. These four categories are All Merged, All Not Merged," \ - " Slowest 20% Merged, and Slowest 20% Not Merged." 
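Note (illustrative, not part of the patch): the route above places its "All" and "Slowest 20%" counts side by side by drawing one vbar_stack per data set and offsetting each stack from the category center with Bokeh's dodge transform, flipping the offset's sign between passes. A minimal, self-contained sketch of that pattern with invented counts, using the same pre-3.0 plot_width/plot_height keywords this module already uses:

```
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure
from bokeh.transform import dodge

years = ['2019', '2020']
source = ColumnDataSource({
    'X': years,
    'Merged / Accepted': [12, 17],
    'Not Merged / Rejected': [5, 8],
})

p = figure(x_range=years, plot_height=350, plot_width=900, toolbar_location=None)

# a positive offset pushes this stack to the right of the category center; repeating
# the call with dodge('X', -0.12, ...) for a second source places its stack alongside
# rather than on top
p.vbar_stack(['Merged / Accepted', 'Not Merged / Rejected'],
             x=dodge('X', 0.12, range=p.x_range), width=0.2, source=source)
```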
- p = add_caption_to_plot(p, caption) + p.y_range = Range1d(0, max(all_totals) * 1.4) - caption_plot = p + p.xgrid.grid_line_color = None + p.legend.location = "top_center" + p.legend.orientation = "horizontal" + p.axis.minor_tick_line_color = None + p.outline_line_color = None + p.yaxis.axis_label = 'Count of Pull Requests' + p.xaxis.axis_label = 'Repository' if x_axis == 'repo_name' else 'Year Closed' if x_axis == 'closed_year' else '' - grid = gridplot([[plot], [caption_plot]]) + p.title.align = "center" + p.title.text_font_size = "16px" - if return_json == "true": - var = Response(response=json.dumps(json_item(grid, "PR_counts_by_merged_status")), - mimetype='application/json', - status=200) + p.xaxis.axis_label_text_font_size = "16px" + p.xaxis.major_label_text_font_size = "16px" - var.headers["Access-Control-Allow-Orgin"] = "*" + p.yaxis.axis_label_text_font_size = "16px" + p.yaxis.major_label_text_font_size = "16px" - return var + p.outline_line_color = None - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - filename = export_png(grid, timeout=180) + plot = p - return send_file(filename) + p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the number of closed pull requests per year in " \ + "four different categories. These four categories are All Merged, All Not Merged," \ + " Slowest 20% Merged, and Slowest 20% Not Merged." + p = add_caption_to_plot(p, caption) - @server.app.route('/{}/pull_request_reports/mean_response_times_for_PR/'.format(AUGUR_API_VERSION), - methods=["GET"]) - def mean_response_times_for_PR(): + caption_plot = p - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + grid = gridplot([[plot], [caption_plot]]) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) + if return_json == "true": + var = Response(response=json.dumps(json_item(grid, "PR_counts_by_merged_status")), + mimetype='application/json', + status=200) - return_json = request.args.get('return_json', "false") + var.headers["Access-Control-Allow-Orgin"] = "*" - df_type = get_df_tuple_locations() + return var - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + filename = export_png(grid, timeout=180) - time_unit = 'days' - x_max = 95 - y_axis = 'closed_year' - description = "All Closed" - legend_position = (410, 10) + return send_file(filename) - # gets pr_closed data - # selects only need columns (pr_closed_needed_columns) - # removes columns that cannot be NULL (pr_closed_not_null_columns) - input_df = df_tuple[df_type["pr_closed"]] - needed_columns = ['repo_id', 'repo_name', y_axis, 'merged_flag', time_unit + '_to_first_response', - time_unit + '_to_last_response', time_unit + '_to_close'] - input_df = filter_data(input_df, needed_columns) +@app.route('/{}/pull_request_reports/mean_response_times_for_PR/'.format(AUGUR_API_VERSION), + methods=["GET"]) +def mean_response_times_for_PR(): - if len(input_df) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + if error: + return 
Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - driver_df = input_df.copy() # deep copy input data so we do not alter the external dataframe + return_json = request.args.get('return_json', "false") - title_beginning = '{}: '.format(repo_dict[repo_id]) - plot_width = 950 - p = figure(toolbar_location=None, y_range=sorted(driver_df[y_axis].unique()), plot_width=plot_width, - plot_height=450, # 75*len(driver_df[y_axis].unique()), - title="{}Mean Response Times for Pull Requests {}".format(title_beginning, description)) + df_type = get_df_tuple_locations() - first_response_glyphs = [] - last_response_glyphs = [] - merged_days_to_close_glyphs = [] - not_merged_days_to_close_glyphs = [] + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - possible_maximums = [] + time_unit = 'days' + x_max = 95 + y_axis = 'closed_year' + description = "All Closed" + legend_position = (410, 10) - # FIXME repo_set is not defined - # setup color pallete - try: - colors = Colorblind[len(repo_set)] - except: - colors = Colorblind[3] - - y_merged_data_list = [] - y_not_merged_data_list = [] - - # calculate data frist time to obtain the maximum and make sure there is message data - for y_value in driver_df[y_axis].unique(): - - y_merged_data = driver_df.loc[ - (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Merged / Accepted')] - y_not_merged_data = driver_df.loc[ - (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Not Merged / Rejected')] - - if len(y_merged_data) > 0: - - y_merged_data_first_response_mean = y_merged_data[time_unit + '_to_first_response'].mean() - y_merged_data_last_response_mean = y_merged_data[time_unit + '_to_last_response'].mean() - y_merged_data_to_close_mean = y_merged_data[time_unit + '_to_close'].mean() - - if (math.isnan(y_merged_data_first_response_mean) or math.isnan( - y_merged_data_last_response_mean) or math.isnan(y_merged_data_to_close_mean)): - return Response( - response="There is no message data for this repo, in the database you are accessing", - mimetype='application/json', status=200) - else: - y_merged_data[time_unit + '_to_first_response_mean'] = y_merged_data_first_response_mean.round(1) - y_merged_data[time_unit + '_to_last_response_mean'] = y_merged_data_last_response_mean.round(1) - y_merged_data[time_unit + '_to_close_mean'] = y_merged_data_to_close_mean.round(1) + # gets pr_closed data + # selects only need columns (pr_closed_needed_columns) + # removes columns that cannot be NULL (pr_closed_not_null_columns) + input_df = df_tuple[df_type["pr_closed"]] + needed_columns = ['repo_id', 'repo_name', y_axis, 'merged_flag', time_unit + '_to_first_response', + time_unit + '_to_last_response', time_unit + '_to_close'] + input_df = filter_data(input_df, needed_columns) + + if len(input_df) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) + + repo_dict = {repo_id: input_df.loc[input_df['repo_id'] == repo_id].iloc[0]['repo_name']} + + driver_df = input_df.copy() # deep copy input data so we do not alter the external dataframe + + title_beginning = '{}: '.format(repo_dict[repo_id]) + plot_width = 950 + p = figure(toolbar_location=None, y_range=sorted(driver_df[y_axis].unique()), plot_width=plot_width, + plot_height=450, # 75*len(driver_df[y_axis].unique()), + title="{}Mean Response Times for Pull Requests {}".format(title_beginning, description)) + + 
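Note (orientation only): the palette lookup a few lines below is wrapped in try/except because, as the FIXME there says, repo_set is never defined in this function, so the lookup always falls through to the three-color fallback. bokeh.palettes.Colorblind is a dict keyed by palette size (3 through 8), so an out-of-range length would also raise:

```
from bokeh.palettes import Colorblind

# referencing the undefined repo_set raises NameError, and a size outside 3..8 raises
# KeyError; either way the except branch in the patch ends up assigning Colorblind[3]
colors = Colorblind[3]
```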
first_response_glyphs = [] + last_response_glyphs = [] + merged_days_to_close_glyphs = [] + not_merged_days_to_close_glyphs = [] + + possible_maximums = [] + + # FIXME repo_set is not defined + # setup color pallete + try: + colors = Colorblind[len(repo_set)] + except: + colors = Colorblind[3] + + y_merged_data_list = [] + y_not_merged_data_list = [] + + # calculate data frist time to obtain the maximum and make sure there is message data + for y_value in driver_df[y_axis].unique(): + + y_merged_data = driver_df.loc[ + (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Merged / Accepted')] + y_not_merged_data = driver_df.loc[ + (driver_df[y_axis] == y_value) & (driver_df['merged_flag'] == 'Not Merged / Rejected')] + + if len(y_merged_data) > 0: + + y_merged_data_first_response_mean = y_merged_data[time_unit + '_to_first_response'].mean() + y_merged_data_last_response_mean = y_merged_data[time_unit + '_to_last_response'].mean() + y_merged_data_to_close_mean = y_merged_data[time_unit + '_to_close'].mean() + + if (math.isnan(y_merged_data_first_response_mean) or math.isnan( + y_merged_data_last_response_mean) or math.isnan(y_merged_data_to_close_mean)): + return Response( + response="There is no message data for this repo, in the database you are accessing", + mimetype='application/json', status=200) else: - y_merged_data[time_unit + '_to_first_response_mean'] = 0.00 - y_merged_data[time_unit + '_to_last_response_mean'] = 0.00 - y_merged_data[time_unit + '_to_close_mean'] = 0.00 - - if len(y_not_merged_data) > 0: - - y_not_merged_data_first_response_mean = y_not_merged_data[time_unit + '_to_first_response'].mean() - y_not_merged_data_last_response_mean = y_not_merged_data[time_unit + '_to_last_response'].mean() - y_not_merged_data_to_close_mean = y_not_merged_data[time_unit + '_to_close'].mean() - - if (math.isnan(y_not_merged_data_first_response_mean) or math.isnan( - y_not_merged_data_last_response_mean) or math.isnan(y_not_merged_data_to_close_mean)): - return Response( - response="There is no message data for this repo, in the database you are accessing", - mimetype='application/json', status=200) - else: - y_not_merged_data[ - time_unit + '_to_first_response_mean'] = y_not_merged_data_first_response_mean.round(1) - y_not_merged_data[ - time_unit + '_to_last_response_mean'] = y_not_merged_data_last_response_mean.round(1) - y_not_merged_data[time_unit + '_to_close_mean'] = y_not_merged_data_to_close_mean.round(1) + y_merged_data[time_unit + '_to_first_response_mean'] = y_merged_data_first_response_mean.round(1) + y_merged_data[time_unit + '_to_last_response_mean'] = y_merged_data_last_response_mean.round(1) + y_merged_data[time_unit + '_to_close_mean'] = y_merged_data_to_close_mean.round(1) + else: + y_merged_data[time_unit + '_to_first_response_mean'] = 0.00 + y_merged_data[time_unit + '_to_last_response_mean'] = 0.00 + y_merged_data[time_unit + '_to_close_mean'] = 0.00 + + if len(y_not_merged_data) > 0: + + y_not_merged_data_first_response_mean = y_not_merged_data[time_unit + '_to_first_response'].mean() + y_not_merged_data_last_response_mean = y_not_merged_data[time_unit + '_to_last_response'].mean() + y_not_merged_data_to_close_mean = y_not_merged_data[time_unit + '_to_close'].mean() + + if (math.isnan(y_not_merged_data_first_response_mean) or math.isnan( + y_not_merged_data_last_response_mean) or math.isnan(y_not_merged_data_to_close_mean)): + return Response( + response="There is no message data for this repo, in the database you are accessing", + 
mimetype='application/json', status=200) else: - y_not_merged_data[time_unit + '_to_first_response_mean'] = 0.00 - y_not_merged_data[time_unit + '_to_last_response_mean'] = 0.00 - y_not_merged_data[time_unit + '_to_close_mean'] = 0.00 + y_not_merged_data[ + time_unit + '_to_first_response_mean'] = y_not_merged_data_first_response_mean.round(1) + y_not_merged_data[ + time_unit + '_to_last_response_mean'] = y_not_merged_data_last_response_mean.round(1) + y_not_merged_data[time_unit + '_to_close_mean'] = y_not_merged_data_to_close_mean.round(1) + else: + y_not_merged_data[time_unit + '_to_first_response_mean'] = 0.00 + y_not_merged_data[time_unit + '_to_last_response_mean'] = 0.00 + y_not_merged_data[time_unit + '_to_close_mean'] = 0.00 - possible_maximums.append(max(y_merged_data[time_unit + '_to_close_mean'])) - possible_maximums.append(max(y_not_merged_data[time_unit + '_to_close_mean'])) + possible_maximums.append(max(y_merged_data[time_unit + '_to_close_mean'])) + possible_maximums.append(max(y_not_merged_data[time_unit + '_to_close_mean'])) - maximum = max(possible_maximums) * 1.15 - ideal_difference = maximum * 0.064 + maximum = max(possible_maximums) * 1.15 + ideal_difference = maximum * 0.064 - y_merged_data_list.append(y_merged_data) - y_not_merged_data_list.append(y_not_merged_data) + y_merged_data_list.append(y_merged_data) + y_not_merged_data_list.append(y_not_merged_data) - # loop through data and add it to the plot - for index in range(0, len(y_merged_data_list)): + # loop through data and add it to the plot + for index in range(0, len(y_merged_data_list)): - y_merged_data = y_merged_data_list[index] - y_not_merged_data = y_not_merged_data_list[index] + y_merged_data = y_merged_data_list[index] + y_not_merged_data = y_not_merged_data_list[index] - not_merged_source = ColumnDataSource(y_not_merged_data) - merged_source = ColumnDataSource(y_merged_data) + not_merged_source = ColumnDataSource(y_not_merged_data) + merged_source = ColumnDataSource(y_merged_data) - # mean PR length for merged - merged_days_to_close_glyph = p.hbar(y=dodge(y_axis, -0.1, range=p.y_range), left=0, + # mean PR length for merged + merged_days_to_close_glyph = p.hbar(y=dodge(y_axis, -0.1, range=p.y_range), left=0, + right=time_unit + '_to_close_mean', + height=0.04 * len(driver_df[y_axis].unique()), + source=merged_source, + fill_color="black") # ,legend_label="Mean Days to Close", + merged_days_to_close_glyphs.append(merged_days_to_close_glyph) + # Data label + labels = LabelSet(x=time_unit + '_to_close_mean', y=dodge(y_axis, -0.1, range=p.y_range), + text=time_unit + '_to_close_mean', y_offset=-8, x_offset=34, # 34 + text_font_size="12pt", text_color="black", + source=merged_source, text_align='center') + p.add_layout(labels) + + # mean PR length For nonmerged + not_merged_days_to_close_glyph = p.hbar(y=dodge(y_axis, 0.1, range=p.y_range), left=0, right=time_unit + '_to_close_mean', height=0.04 * len(driver_df[y_axis].unique()), - source=merged_source, - fill_color="black") # ,legend_label="Mean Days to Close", - merged_days_to_close_glyphs.append(merged_days_to_close_glyph) - # Data label - labels = LabelSet(x=time_unit + '_to_close_mean', y=dodge(y_axis, -0.1, range=p.y_range), - text=time_unit + '_to_close_mean', y_offset=-8, x_offset=34, # 34 - text_font_size="12pt", text_color="black", - source=merged_source, text_align='center') - p.add_layout(labels) + source=not_merged_source, + fill_color="#e84d60") # legend_label="Mean Days to Close", + 
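Note (illustrative, not part of the patch): each bar in this function, like the ones in average_comments_per_PR above, is paired with a LabelSet that reads the same ColumnDataSource, so the printed mean always matches the bar it annotates. A minimal sketch of that bar-plus-label pairing with invented column names:

```
from bokeh.models import ColumnDataSource, LabelSet
from bokeh.plotting import figure
from bokeh.transform import dodge

source = ColumnDataSource({'closed_year': ['2021', '2022'],
                           'days_to_close_mean': [4.5, 2.0]})

p = figure(y_range=['2021', '2022'], plot_width=950, plot_height=450)

# bar and label share one source, so the label text always reflects the bar length
p.hbar(y=dodge('closed_year', -0.1, range=p.y_range), left=0,
       right='days_to_close_mean', height=0.2, source=source, fill_color='black')
p.add_layout(LabelSet(x='days_to_close_mean', y=dodge('closed_year', -0.1, range=p.y_range),
                      text='days_to_close_mean', x_offset=20, y_offset=-8,
                      text_font_size='12pt', source=source, text_align='center'))
```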
not_merged_days_to_close_glyphs.append(not_merged_days_to_close_glyph) + # Data label + labels = LabelSet(x=time_unit + '_to_close_mean', y=dodge(y_axis, 0.1, range=p.y_range), + text=time_unit + '_to_close_mean', y_offset=-8, x_offset=44, + text_font_size="12pt", text_color="#e84d60", + source=not_merged_source, text_align='center') + p.add_layout(labels) - # mean PR length For nonmerged - not_merged_days_to_close_glyph = p.hbar(y=dodge(y_axis, 0.1, range=p.y_range), left=0, - right=time_unit + '_to_close_mean', - height=0.04 * len(driver_df[y_axis].unique()), - source=not_merged_source, - fill_color="#e84d60") # legend_label="Mean Days to Close", - not_merged_days_to_close_glyphs.append(not_merged_days_to_close_glyph) - # Data label - labels = LabelSet(x=time_unit + '_to_close_mean', y=dodge(y_axis, 0.1, range=p.y_range), - text=time_unit + '_to_close_mean', y_offset=-8, x_offset=44, - text_font_size="12pt", text_color="#e84d60", - source=not_merged_source, text_align='center') - p.add_layout(labels) + # if the difference between two values is less than 6.4 percent move the second one to the right 30 pixels + if (max(y_merged_data[time_unit + '_to_last_response_mean']) - max( + y_merged_data[time_unit + '_to_first_response_mean'])) < ideal_difference: + merged_x_offset = 30 + else: + merged_x_offset = 0 - # if the difference between two values is less than 6.4 percent move the second one to the right 30 pixels - if (max(y_merged_data[time_unit + '_to_last_response_mean']) - max( - y_merged_data[time_unit + '_to_first_response_mean'])) < ideal_difference: - merged_x_offset = 30 - else: - merged_x_offset = 0 + # if the difference between two values is less than 6.4 percent move the second one to the right 30 pixels + if (max(y_not_merged_data[time_unit + '_to_last_response_mean']) - max( + y_not_merged_data[time_unit + '_to_first_response_mean'])) < ideal_difference: + not_merged_x_offset = 30 + else: + not_merged_x_offset = 0 - # if the difference between two values is less than 6.4 percent move the second one to the right 30 pixels - if (max(y_not_merged_data[time_unit + '_to_last_response_mean']) - max( - y_not_merged_data[time_unit + '_to_first_response_mean'])) < ideal_difference: - not_merged_x_offset = 30 - else: - not_merged_x_offset = 0 + # if there is only one bar set the y_offsets so the labels will not overlap the bars + if len(driver_df[y_axis].unique()) == 1: + merged_y_offset = -65 + not_merged_y_offset = 45 + else: + merged_y_offset = -45 + not_merged_y_offset = 25 + + # mean time to first response + glyph = Rect(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, -0.1, range=p.y_range), + width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[0]) + first_response_glyph = p.add_glyph(merged_source, glyph) + first_response_glyphs.append(first_response_glyph) + # Data label + labels = LabelSet(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, 0, range=p.y_range), + text=time_unit + '_to_first_response_mean', x_offset=0, y_offset=merged_y_offset, # -60, + text_font_size="12pt", text_color=colors[0], + source=merged_source, text_align='center') + p.add_layout(labels) - # if there is only one bar set the y_offsets so the labels will not overlap the bars - if len(driver_df[y_axis].unique()) == 1: - merged_y_offset = -65 - not_merged_y_offset = 45 - else: - merged_y_offset = -45 - not_merged_y_offset = 25 - - # mean time to first response - glyph = Rect(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, -0.1, range=p.y_range), - 
width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[0]) - first_response_glyph = p.add_glyph(merged_source, glyph) - first_response_glyphs.append(first_response_glyph) - # Data label - labels = LabelSet(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, 0, range=p.y_range), - text=time_unit + '_to_first_response_mean', x_offset=0, y_offset=merged_y_offset, # -60, - text_font_size="12pt", text_color=colors[0], - source=merged_source, text_align='center') - p.add_layout(labels) + # for nonmerged + glyph = Rect(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, 0.1, range=p.y_range), + width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[0]) + first_response_glyph = p.add_glyph(not_merged_source, glyph) + first_response_glyphs.append(first_response_glyph) + # Data label + labels = LabelSet(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, 0, range=p.y_range), + text=time_unit + '_to_first_response_mean', x_offset=0, y_offset=not_merged_y_offset, + # 40, + text_font_size="12pt", text_color=colors[0], + source=not_merged_source, text_align='center') + p.add_layout(labels) - # for nonmerged - glyph = Rect(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, 0.1, range=p.y_range), - width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[0]) - first_response_glyph = p.add_glyph(not_merged_source, glyph) - first_response_glyphs.append(first_response_glyph) - # Data label - labels = LabelSet(x=time_unit + '_to_first_response_mean', y=dodge(y_axis, 0, range=p.y_range), - text=time_unit + '_to_first_response_mean', x_offset=0, y_offset=not_merged_y_offset, - # 40, - text_font_size="12pt", text_color=colors[0], - source=not_merged_source, text_align='center') - p.add_layout(labels) + # mean time to last response + glyph = Rect(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, -0.1, range=p.y_range), + width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[1]) + last_response_glyph = p.add_glyph(merged_source, glyph) + last_response_glyphs.append(last_response_glyph) + # Data label + labels = LabelSet(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, 0, range=p.y_range), + text=time_unit + '_to_last_response_mean', x_offset=merged_x_offset, + y_offset=merged_y_offset, # -60, + text_font_size="12pt", text_color=colors[1], + source=merged_source, text_align='center') + p.add_layout(labels) - # mean time to last response - glyph = Rect(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, -0.1, range=p.y_range), - width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[1]) - last_response_glyph = p.add_glyph(merged_source, glyph) - last_response_glyphs.append(last_response_glyph) - # Data label - labels = LabelSet(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, 0, range=p.y_range), - text=time_unit + '_to_last_response_mean', x_offset=merged_x_offset, - y_offset=merged_y_offset, # -60, - text_font_size="12pt", text_color=colors[1], - source=merged_source, text_align='center') - p.add_layout(labels) + # for nonmerged + glyph = Rect(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, 0.1, range=p.y_range), + width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[1]) + last_response_glyph = p.add_glyph(not_merged_source, glyph) + last_response_glyphs.append(last_response_glyph) + # Data label + labels = LabelSet(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, 0, 
range=p.y_range), + text=time_unit + '_to_last_response_mean', x_offset=not_merged_x_offset, + y_offset=not_merged_y_offset, # 40, + text_font_size="12pt", text_color=colors[1], + source=not_merged_source, text_align='center') + p.add_layout(labels) - # for nonmerged - glyph = Rect(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, 0.1, range=p.y_range), - width=x_max / 100, height=0.08 * len(driver_df[y_axis].unique()), fill_color=colors[1]) - last_response_glyph = p.add_glyph(not_merged_source, glyph) - last_response_glyphs.append(last_response_glyph) - # Data label - labels = LabelSet(x=time_unit + '_to_last_response_mean', y=dodge(y_axis, 0, range=p.y_range), - text=time_unit + '_to_last_response_mean', x_offset=not_merged_x_offset, - y_offset=not_merged_y_offset, # 40, - text_font_size="12pt", text_color=colors[1], - source=not_merged_source, text_align='center') - p.add_layout(labels) + p.title.align = "center" + p.title.text_font_size = "16px" - p.title.align = "center" - p.title.text_font_size = "16px" + p.xaxis.axis_label = "Days to Close" + p.xaxis.axis_label_text_font_size = "16px" + p.xaxis.major_label_text_font_size = "16px" - p.xaxis.axis_label = "Days to Close" - p.xaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "16px" + # adjust the starting point and ending point based on the maximum of maximum of the graph + p.x_range = Range1d(maximum / 30 * -1, maximum * 1.15) - # adjust the starting point and ending point based on the maximum of maximum of the graph - p.x_range = Range1d(maximum / 30 * -1, maximum * 1.15) + p.yaxis.axis_label = "Repository" if y_axis == 'repo_name' else 'Year Closed' if y_axis == 'closed_year' else '' + p.yaxis.axis_label_text_font_size = "16px" + p.yaxis.major_label_text_font_size = "16px" + p.ygrid.grid_line_color = None + p.y_range.range_padding = 0.15 - p.yaxis.axis_label = "Repository" if y_axis == 'repo_name' else 'Year Closed' if y_axis == 'closed_year' else '' - p.yaxis.axis_label_text_font_size = "16px" - p.yaxis.major_label_text_font_size = "16px" - p.ygrid.grid_line_color = None - p.y_range.range_padding = 0.15 + p.outline_line_color = None + p.toolbar.logo = None + p.toolbar_location = None - p.outline_line_color = None - p.toolbar.logo = None - p.toolbar_location = None + def add_legend(location, orientation, side): + legend = Legend( + items=[ + ("Mean Days to First Response", first_response_glyphs), + ("Mean Days to Last Response", last_response_glyphs), + ("Merged Mean Days to Close", merged_days_to_close_glyphs), + ("Not Merged Mean Days to Close", not_merged_days_to_close_glyphs) + ], - def add_legend(location, orientation, side): - legend = Legend( - items=[ - ("Mean Days to First Response", first_response_glyphs), - ("Mean Days to Last Response", last_response_glyphs), - ("Merged Mean Days to Close", merged_days_to_close_glyphs), - ("Not Merged Mean Days to Close", not_merged_days_to_close_glyphs) - ], + location=location, + orientation=orientation, + border_line_color="black" + # title='Example Title' + ) + p.add_layout(legend, side) - location=location, - orientation=orientation, - border_line_color="black" - # title='Example Title' - ) - p.add_layout(legend, side) + # add_legend((150, 50), "horizontal", "center") + add_legend((10, 135), "vertical", "right") + + plot = p + + p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the average number of days between comments for all closed pull requests per month " \ + "in four categories. 
These four categories are All Merged, All Not Merged, Slowest 20% Merged, " \ + "and Slowest 20% Not Merged." + p = add_caption_to_plot(p, caption) + + caption_plot = p - # add_legend((150, 50), "horizontal", "center") - add_legend((10, 135), "vertical", "right") + grid = gridplot([[plot], [caption_plot]]) - plot = p + if return_json == "true": + var = Response(response=json.dumps(json_item(grid, "mean_response_times_for_PR")), + mimetype='application/json', + status=200) - p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the average number of days between comments for all closed pull requests per month " \ - "in four categories. These four categories are All Merged, All Not Merged, Slowest 20% Merged, " \ - "and Slowest 20% Not Merged." - p = add_caption_to_plot(p, caption) + var.headers["Access-Control-Allow-Orgin"] = "*" - caption_plot = p + return var - grid = gridplot([[plot], [caption_plot]]) + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + filename = export_png(grid, timeout=180) - if return_json == "true": - var = Response(response=json.dumps(json_item(grid, "mean_response_times_for_PR")), - mimetype='application/json', - status=200) + return send_file(filename) - var.headers["Access-Control-Allow-Orgin"] = "*" +@app.route('/{}/pull_request_reports/mean_days_between_PR_comments/'.format(AUGUR_API_VERSION), + methods=["GET"]) +def mean_days_between_PR_comments(): - return var + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - filename = export_png(grid, timeout=180) + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - return send_file(filename) + return_json = request.args.get('return_json', "false") - @server.app.route('/{}/pull_request_reports/mean_days_between_PR_comments/'.format(AUGUR_API_VERSION), - methods=["GET"]) - def mean_days_between_PR_comments(): + time_unit = 'Days' + x_axis = 'closed_yearmonth' + y_axis = 'average_days_between_responses' + description = "All Closed" + line_group = 'merged_flag' + num_outliers_repo_map = {} - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + df_type = get_df_tuple_locations() - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) - - return_json = request.args.get('return_json', "false") - - time_unit = 'Days' - x_axis = 'closed_yearmonth' - y_axis = 'average_days_between_responses' - description = "All Closed" - line_group = 'merged_flag' - num_outliers_repo_map = {} - - df_type = get_df_tuple_locations() - - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - - # gets pr_closed data - # selects only need columns (pr_closed_needed_columns) - # removes columns that cannot be NULL (pr_closed_not_null_columns) - pr_closed = df_tuple[df_type["pr_closed"]] - pr_closed_needed_columns = ['repo_id', 'repo_name', x_axis, 'average_time_between_responses', line_group] - pr_closed = filter_data(pr_closed, pr_closed_needed_columns) - - # gets pr_slow20_not_merged data - # selects only need columns (pr_slow20_not_merged_needed_columns) - # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) - pr_slow20_not_merged = df_tuple[df_type["pr_slow20_not_merged"]] - 
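Note (assumption, to make the call sites easier to read): filter_data is used throughout these routes but is not defined in this hunk. Going by the comments around its calls ("selects only need columns", "removes columns that cannot be NULL"), it appears to narrow each frame to the listed columns and drop incomplete rows. A plausible minimal version; the real helper may take extra arguments and differ in detail:

```
import pandas as pd

def filter_data(df: pd.DataFrame, needed_columns, not_null_columns=None):
    # keep only the columns the plot needs, then drop rows missing any required value
    not_null_columns = not_null_columns or needed_columns
    return df[needed_columns].dropna(subset=not_null_columns)
```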
pr_slow20_not_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'average_time_between_responses', line_group] - pr_slow20_not_merged = filter_data(pr_slow20_not_merged, pr_slow20_not_merged_needed_columns) - - # gets pr_slow20_merged data - # selects only need columns (pr_slow20_not_merged_needed_columns) - # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) - pr_slow20_merged = df_tuple[df_type["pr_slow20_merged"]] - pr_slow20_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'average_time_between_responses', line_group] - pr_slow20_merged = filter_data(pr_slow20_merged, pr_slow20_merged_needed_columns) - - if len(pr_closed) == 0 or len(pr_slow20_not_merged) == 0 or len(pr_slow20_merged) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - try: - pr_closed['average_days_between_responses'] = pr_closed['average_time_between_responses'].map( - lambda x: x.days).astype(float) - pr_slow20_not_merged['average_days_between_responses'] = pr_slow20_not_merged[ - 'average_time_between_responses'].map(lambda x: x.days).astype(float) - pr_slow20_merged['average_days_between_responses'] = pr_slow20_merged['average_time_between_responses'].map( - lambda x: x.days).astype(float) - except: - return Response(response="There is no message data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + # gets pr_closed data + # selects only need columns (pr_closed_needed_columns) + # removes columns that cannot be NULL (pr_closed_not_null_columns) + pr_closed = df_tuple[df_type["pr_closed"]] + pr_closed_needed_columns = ['repo_id', 'repo_name', x_axis, 'average_time_between_responses', line_group] + pr_closed = filter_data(pr_closed, pr_closed_needed_columns) - repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} + # gets pr_slow20_not_merged data + # selects only need columns (pr_slow20_not_merged_needed_columns) + # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) + pr_slow20_not_merged = df_tuple[df_type["pr_slow20_not_merged"]] + pr_slow20_not_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'average_time_between_responses', line_group] + pr_slow20_not_merged = filter_data(pr_slow20_not_merged, pr_slow20_not_merged_needed_columns) - data_dict = {'All': pr_closed, 'Slowest 20%': pr_slow20_not_merged.append(pr_slow20_merged, ignore_index=True)} + # gets pr_slow20_merged data + # selects only need columns (pr_slow20_not_merged_needed_columns) + # removes columns that cannot be NULL (pr_slow20_not_merged_not_null_columns) + pr_slow20_merged = df_tuple[df_type["pr_slow20_merged"]] + pr_slow20_merged_needed_columns = ['repo_id', 'repo_name', x_axis, 'average_time_between_responses', line_group] + pr_slow20_merged = filter_data(pr_slow20_merged, pr_slow20_merged_needed_columns) - plot_width = 950 - p1 = figure(x_axis_type="datetime", - title="{}: Mean {} Between Comments by Month Closed for {} Pull Requests".format(repo_dict[repo_id], - time_unit, - description), - plot_width=plot_width, x_range=(pr_all[x_axis].min(), pr_all[x_axis].max()), plot_height=500, - toolbar_location=None) - colors = Category20[10][6:] - color_index = 0 + if len(pr_closed) == 0 or len(pr_slow20_not_merged) == 0 or len(pr_slow20_merged) == 0: + return Response(response="There is no data for 
this repo, in the database you are accessing", + mimetype='application/json', + status=200) - glyphs = [] + try: + pr_closed['average_days_between_responses'] = pr_closed['average_time_between_responses'].map( + lambda x: x.days).astype(float) + pr_slow20_not_merged['average_days_between_responses'] = pr_slow20_not_merged[ + 'average_time_between_responses'].map(lambda x: x.days).astype(float) + pr_slow20_merged['average_days_between_responses'] = pr_slow20_merged['average_time_between_responses'].map( + lambda x: x.days).astype(float) + except: + return Response(response="There is no message data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - possible_maximums = [] - for data_desc, input_df in data_dict.items(): + repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} - driver_df = input_df.copy() + data_dict = {'All': pr_closed, 'Slowest 20%': pr_slow20_not_merged.append(pr_slow20_merged, ignore_index=True)} - driver_df = remove_outliers(driver_df, y_axis, num_outliers_repo_map) + plot_width = 950 + p1 = figure(x_axis_type="datetime", + title="{}: Mean {} Between Comments by Month Closed for {} Pull Requests".format(repo_dict[repo_id], + time_unit, + description), + plot_width=plot_width, x_range=(pr_all[x_axis].min(), pr_all[x_axis].max()), plot_height=500, + toolbar_location=None) + colors = Category20[10][6:] + color_index = 0 + + glyphs = [] + + possible_maximums = [] + for data_desc, input_df in data_dict.items(): - driver_df = driver_df.loc[driver_df['repo_id'] == repo_id] - index = 0 + driver_df = input_df.copy() - driver_df_mean = driver_df.groupby(['repo_id', line_group, x_axis], as_index=False).mean() + driver_df = remove_outliers(driver_df, y_axis, num_outliers_repo_map) - title_ending = '' - if repo_id: - title_ending += ' for Repo: {}'.format(repo_id) + driver_df = driver_df.loc[driver_df['repo_id'] == repo_id] + index = 0 - for group_num, line_group_value in enumerate(driver_df[line_group].unique(), color_index): - glyphs.append(p1.line(driver_df_mean.loc[driver_df_mean[line_group] == line_group_value][x_axis], - driver_df_mean.loc[driver_df_mean[line_group] == line_group_value][y_axis], - color=colors[group_num], line_width=3)) - color_index += 1 - possible_maximums.append( - max(driver_df_mean.loc[driver_df_mean[line_group] == line_group_value][y_axis].dropna())) - for repo, num_outliers in num_outliers_repo_map.items(): - # FIXME repo_name is not defined - if repo_name == repo: - p1.add_layout( - Title(text="** {} outliers for {} were removed".format(num_outliers, repo), align="center"), - "below") + driver_df_mean = driver_df.groupby(['repo_id', line_group, x_axis], as_index=False).mean() - p1.grid.grid_line_alpha = 0.3 - p1.xaxis.axis_label = 'Month Closed' - p1.xaxis.ticker.desired_num_ticks = 15 - p1.yaxis.axis_label = 'Mean {} Between Responses'.format(time_unit) - p1.legend.location = "top_left" + title_ending = '' + if repo_id: + title_ending += ' for Repo: {}'.format(repo_id) - legend = Legend( - items=[ - ("All Not Merged / Rejected", [glyphs[0]]), - ("All Merged / Accepted", [glyphs[1]]), - ("Slowest 20% Not Merged / Rejected", [glyphs[2]]), - ("Slowest 20% Merged / Accepted", [glyphs[3]]) - ], + for group_num, line_group_value in enumerate(driver_df[line_group].unique(), color_index): + glyphs.append(p1.line(driver_df_mean.loc[driver_df_mean[line_group] == line_group_value][x_axis], + driver_df_mean.loc[driver_df_mean[line_group] == line_group_value][y_axis], + 
color=colors[group_num], line_width=3)) + color_index += 1 + possible_maximums.append( + max(driver_df_mean.loc[driver_df_mean[line_group] == line_group_value][y_axis].dropna())) + for repo, num_outliers in num_outliers_repo_map.items(): + # FIXME repo_name is not defined + if repo_name == repo: + p1.add_layout( + Title(text="** {} outliers for {} were removed".format(num_outliers, repo), align="center"), + "below") - location='center_right', - orientation='vertical', - border_line_color="black" - ) + p1.grid.grid_line_alpha = 0.3 + p1.xaxis.axis_label = 'Month Closed' + p1.xaxis.ticker.desired_num_ticks = 15 + p1.yaxis.axis_label = 'Mean {} Between Responses'.format(time_unit) + p1.legend.location = "top_left" - p1.add_layout(legend, 'right') + legend = Legend( + items=[ + ("All Not Merged / Rejected", [glyphs[0]]), + ("All Merged / Accepted", [glyphs[1]]), + ("Slowest 20% Not Merged / Rejected", [glyphs[2]]), + ("Slowest 20% Merged / Accepted", [glyphs[3]]) + ], - p1.title.text_font_size = "16px" + location='center_right', + orientation='vertical', + border_line_color="black" + ) - p1.xaxis.axis_label_text_font_size = "16px" - p1.xaxis.major_label_text_font_size = "16px" + p1.add_layout(legend, 'right') - p1.yaxis.axis_label_text_font_size = "16px" - p1.yaxis.major_label_text_font_size = "16px" - p1.xaxis.major_label_orientation = 45.0 + p1.title.text_font_size = "16px" - p1.y_range = Range1d(0, max(possible_maximums) * 1.15) + p1.xaxis.axis_label_text_font_size = "16px" + p1.xaxis.major_label_text_font_size = "16px" - plot = p1 + p1.yaxis.axis_label_text_font_size = "16px" + p1.yaxis.major_label_text_font_size = "16px" + p1.xaxis.major_label_orientation = 45.0 - p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the average number of days between comments for all" \ - " closed pull requests per month in four categories. These four categories" \ - " are All Merged, All Not Merged, Slowest 20% Merged, and Slowest 20% Not Merged." - p = add_caption_to_plot(p, caption) + p1.y_range = Range1d(0, max(possible_maximums) * 1.15) - caption_plot = p + plot = p1 - grid = gridplot([[plot], [caption_plot]]) + p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the average number of days between comments for all" \ + " closed pull requests per month in four categories. These four categories" \ + " are All Merged, All Not Merged, Slowest 20% Merged, and Slowest 20% Not Merged." 
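Note (illustrative, not part of the patch): the block that follows repeats the tail every report route in this file ends with: serialize the gridplot with json_item when return_json is "true", otherwise rasterize it with export_png and stream the file back. A self-contained sketch of that shared ending; render_report is a hypothetical name, not a helper from this patch, and the standard CORS header spelling is used here, whereas the routes themselves write "Access-Control-Allow-Orgin":

```
import json

from bokeh.embed import json_item
from bokeh.io import export_png
from flask import Response, send_file

def render_report(grid, chart_name, return_json):
    if return_json == "true":
        resp = Response(response=json.dumps(json_item(grid, chart_name)),
                        mimetype='application/json', status=200)
        resp.headers["Access-Control-Allow-Origin"] = "*"
        return resp
    # export_png renders the Bokeh layout to a PNG on disk and returns its path
    filename = export_png(grid, timeout=180)
    return send_file(filename)
```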
+ p = add_caption_to_plot(p, caption) - if return_json == "true": - var = Response(response=json.dumps(json_item(grid, "mean_days_between_PR_comments")), - mimetype='application/json', - status=200) + caption_plot = p - var.headers["Access-Control-Allow-Orgin"] = "*" + grid = gridplot([[plot], [caption_plot]]) - return var + if return_json == "true": + var = Response(response=json.dumps(json_item(grid, "mean_days_between_PR_comments")), + mimetype='application/json', + status=200) - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - filename = export_png(grid, timeout=180) + var.headers["Access-Control-Allow-Orgin"] = "*" - return send_file(filename) + return var - @server.app.route('/{}/pull_request_reports/PR_time_to_first_response/'.format(AUGUR_API_VERSION), methods=["GET"]) - def PR_time_to_first_response(): + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + filename = export_png(grid, timeout=180) - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + return send_file(filename) - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) +@app.route('/{}/pull_request_reports/PR_time_to_first_response/'.format(AUGUR_API_VERSION), methods=["GET"]) +def PR_time_to_first_response(): - return_json = request.args.get('return_json', "false") - remove_outliers = str(request.args.get('remove_outliers', "true")) + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - x_axis = 'pr_closed_at' - y_axis = 'days_to_first_response' - description = 'All' - group_by = 'merged_flag' - legend_position = 'top_right' + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) - df_type = get_df_tuple_locations() + return_json = request.args.get('return_json', "false") + remove_outliers = str(request.args.get('remove_outliers', "true")) - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + x_axis = 'pr_closed_at' + y_axis = 'days_to_first_response' + description = 'All' + group_by = 'merged_flag' + legend_position = 'top_right' - pr_closed = df_tuple[df_type["pr_closed"]] - needed_columns = ['repo_id', 'repo_name', x_axis, group_by, y_axis] - pr_closed = filter_data(pr_closed, needed_columns) + df_type = get_df_tuple_locations() - if len(pr_closed) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} + pr_closed = df_tuple[df_type["pr_closed"]] + needed_columns = ['repo_id', 'repo_name', x_axis, group_by, y_axis] + pr_closed = filter_data(pr_closed, needed_columns) - driver_df = pr_closed.copy() + if len(pr_closed) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - outliers_removed = 0 + repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} - if remove_outliers == "true": - driver_df, outliers_removed, outlier_cutoff = remove_outliers_by_standard_deviation(driver_df, 'days_to_first_response') + driver_df = pr_closed.copy() - group_by_groups = 
sorted(driver_df[group_by].unique())
+    outliers_removed = 0

-        # setup color palette
-        try:
-            # FIXME repo_set is not defined
-            colors = Colorblind[len(repo_set)]
-        except:
-            colors = Colorblind[3]
+    if remove_outliers == "true":
+        driver_df, outliers_removed, outlier_cutoff = remove_outliers_by_standard_deviation(driver_df, 'days_to_first_response')

-        title_beginning = '{}: '.format(repo_dict[repo_id])
-        plot_width = 180 * 5
-        p = figure(x_range=(
-            driver_df[x_axis].min() - datetime.timedelta(days=30), driver_df[x_axis].max() + datetime.timedelta(days=25)),
-            # (driver_df[y_axis].min(), driver_df[y_axis].max()),
-            toolbar_location=None,
-            title='{}Days to First Response for {} Closed Pull Requests'.format(title_beginning, description),
-            plot_width=plot_width,
-            plot_height=400, x_axis_type='datetime')
+    group_by_groups = sorted(driver_df[group_by].unique())

-        for index, group_by_group in enumerate(group_by_groups):
-            p.scatter(x_axis, y_axis, color=colors[index], marker="square",
-                      source=driver_df.loc[driver_df[group_by] == group_by_group], legend_label=group_by_group)
+    # setup color palette
+    try:
+        # FIXME repo_set is not defined
+        colors = Colorblind[len(repo_set)]
+    except:
+        colors = Colorblind[3]
+
+    title_beginning = '{}: '.format(repo_dict[repo_id])
+    plot_width = 180 * 5
+    p = figure(x_range=(
+        driver_df[x_axis].min() - datetime.timedelta(days=30), driver_df[x_axis].max() + datetime.timedelta(days=25)),
+        # (driver_df[y_axis].min(), driver_df[y_axis].max()),
+        toolbar_location=None,
+        title='{}Days to First Response for {} Closed Pull Requests'.format(title_beginning, description),
+        plot_width=plot_width,
+        plot_height=400, x_axis_type='datetime')
+
+    for index, group_by_group in enumerate(group_by_groups):
+        p.scatter(x_axis, y_axis, color=colors[index], marker="square",
+                  source=driver_df.loc[driver_df[group_by] == group_by_group], legend_label=group_by_group)
+
+        if group_by_group == "Merged / Accepted":
+            merged_values = driver_df.loc[driver_df[group_by] == group_by_group][y_axis].dropna().values.tolist()
+        else:
+            not_merged_values = driver_df.loc[driver_df[group_by] == group_by_group][
+                y_axis].dropna().values.tolist()

-            if group_by_group == "Merged / Accepted":
-                merged_values = driver_df.loc[driver_df[group_by] == group_by_group][y_axis].dropna().values.tolist()
-            else:
-                not_merged_values = driver_df.loc[driver_df[group_by] == group_by_group][
-                    y_axis].dropna().values.tolist()
+        values = not_merged_values + merged_values

-            values = not_merged_values + merged_values
+        if outliers_removed > 0:
+            if repo_id:
+                p.add_layout(Title(
+                    text="** Outliers cut off at {} days: {} outlier(s) for {} were removed **".format(outlier_cutoff,
+                                                                                                       outliers_removed,
+                                                                                                       repo_dict[
+                                                                                                           repo_id]),
+                    align="center"), "below")
+            else:
+                p.add_layout(Title(
+                    text="** Outliers cut off at {} days: {} outlier(s) were removed **".format(outlier_cutoff,
+                                                                                                outliers_removed),
+                    align="center"), "below")

-        if outliers_removed > 0:
-            if repo_id:
-                p.add_layout(Title(
-                    text="** Outliers cut off at {} days: {} outlier(s) for {} were removed **".format(outlier_cutoff,
-                                                                                                       outliers_removed,
-                                                                                                       repo_dict[
-                                                                                                           repo_id]),
-                    align="center"), "below")
-            else:
-                p.add_layout(Title(
-                    text="** Outliers cut off at {} days: {} outlier(s) were removed **".format(outlier_cutoff,
-                                                                                                outliers_removed),
-                    align="center"), "below")
+    p.xaxis.axis_label = 'Date Closed' if x_axis == 'pr_closed_at' else 'Date Created' if x_axis == 'pr_created_at' else 'Date'
+    p.yaxis.axis_label = 'Days to First Response'
+    p.legend.location = legend_position

-        p.xaxis.axis_label = 'Date Closed' if x_axis == 'pr_closed_at' else 'Date Created' if x_axis == 'pr_created_at' else 'Date'
-        p.yaxis.axis_label = 'Days to First Response'
-        p.legend.location = legend_position
+    p.title.align = "center"
+    p.title.text_font_size = "16px"

-        p.title.align = "center"
-        p.title.text_font_size = "16px"
+    p.xaxis.axis_label_text_font_size = "16px"
+    p.xaxis.major_label_text_font_size = "16px"

-        p.xaxis.axis_label_text_font_size = "16px"
-        p.xaxis.major_label_text_font_size = "16px"
+    p.yaxis.axis_label_text_font_size = "16px"
+    p.yaxis.major_label_text_font_size = "16px"

-        p.yaxis.axis_label_text_font_size = "16px"
-        p.yaxis.major_label_text_font_size = "16px"
+    if len(values) == 0:
+        return Response(response="There is no message data for this repo, in the database you are accessing",
+                        mimetype='application/json',
+                        status=200)

-        if len(values) == 0:
-            return Response(response="There is no message data for this repo, in the database you are accessing",
-                            mimetype='application/json',
-                            status=200)
+    # determine y_max by finding the max of the values and scaling it up a small amount
+    y_max = max(values) * 1.015

-        # determine y_max by finding the max of the values and scaling it up a small amount
-        y_max = max(values) * 1.015
+    p.y_range = Range1d(0, y_max)

-        p.y_range = Range1d(0, y_max)
+    plot = p

-        plot = p
+    p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0))
+    caption = "This graph shows the days to first response for individual pull requests, either Merged or Not Merged."
+    p = add_caption_to_plot(p, caption)

-        p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0))
-        caption = "This graph shows the days to first response for individual pull requests, either Merged or Not Merged."
-        p = add_caption_to_plot(p, caption)
+    caption_plot = p

-        caption_plot = p
+    grid = gridplot([[plot], [caption_plot]])

-        grid = gridplot([[plot], [caption_plot]])
+    if return_json == "true":
+        var = Response(response=json.dumps(json_item(grid, "PR_time_to_first_response")),
+                       mimetype='application/json',
+                       status=200)

-        if return_json == "true":
-            var = Response(response=json.dumps(json_item(grid, "PR_time_to_first_response")),
-                           mimetype='application/json',
-                           status=200)
+        var.headers["Access-Control-Allow-Orgin"] = "*"

-            var.headers["Access-Control-Allow-Orgin"] = "*"
+        return var

-            return var
+    # opts = FirefoxOptions()
+    # opts.add_argument("--headless")
+    # driver = webdriver.Firefox(firefox_options=opts)
+    filename = export_png(grid, timeout=180)

-        # opts = FirefoxOptions()
-        # opts.add_argument("--headless")
-        # driver = webdriver.Firefox(firefox_options=opts)
-        filename = export_png(grid, timeout=180)
+    return send_file(filename)

-        return send_file(filename)
+@app.route('/{}/pull_request_reports/average_PR_events_for_closed_PRs/'.format(AUGUR_API_VERSION),
+           methods=["GET"])
+def average_PR_events_for_closed_PRs():

-    @server.app.route('/{}/pull_request_reports/average_PR_events_for_closed_PRs/'.format(AUGUR_API_VERSION),
-                      methods=["GET"])
-    def average_PR_events_for_closed_PRs():
+    repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date()

-        repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date()
+    if error:
+        return Response(response=error["message"],
+                        mimetype='application/json',
+                        status=error["status_code"])

-        if error:
-            return Response(response=error["message"],
-                            mimetype='application/json',
-                            status=error["status_code"])
-
-        return_json = request.args.get('return_json', "false")
-        
include_comments = str(request.args.get('include_comments', True)) - - x_axis = 'closed_year' - facet = 'merged_flag' - columns = 2 - x_max = 1100 - y_axis = 'repo_name' - description = 'All Closed' - optional_comments = ['comment_count'] if include_comments else [] - - df_type = get_df_tuple_locations() - - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - - pr_closed = df_tuple[df_type["pr_closed"]] - needed_columns = ['repo_id', 'repo_name', x_axis, 'assigned_count', - 'review_requested_count', - 'labeled_count', - 'subscribed_count', - 'mentioned_count', - 'referenced_count', - 'closed_count', - 'head_ref_force_pushed_count', - 'merged_count', - 'milestoned_count', - 'unlabeled_count', - 'head_ref_deleted_count', facet] + optional_comments - pr_closed = filter_data(pr_closed, needed_columns) - - if len(pr_closed) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", + return_json = request.args.get('return_json', "false") + include_comments = str(request.args.get('include_comments', True)) + + x_axis = 'closed_year' + facet = 'merged_flag' + columns = 2 + x_max = 1100 + y_axis = 'repo_name' + description = 'All Closed' + optional_comments = ['comment_count'] if include_comments else [] + + df_type = get_df_tuple_locations() + + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + + pr_closed = df_tuple[df_type["pr_closed"]] + needed_columns = ['repo_id', 'repo_name', x_axis, 'assigned_count', + 'review_requested_count', + 'labeled_count', + 'subscribed_count', + 'mentioned_count', + 'referenced_count', + 'closed_count', + 'head_ref_force_pushed_count', + 'merged_count', + 'milestoned_count', + 'unlabeled_count', + 'head_ref_deleted_count', facet] + optional_comments + pr_closed = filter_data(pr_closed, needed_columns) + + if len(pr_closed) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) + + repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} + + colors = linear_gradient('#f5f5dc', '#fff44f', 150)['hex'] + + driver_df = pr_closed.copy() + driver_df[x_axis] = driver_df[x_axis].astype(str) + + if facet == 'closed_year' or y_axis == 'closed_year': + driver_df['closed_year'] = driver_df['closed_year'].astype(int).astype(str) + + y_groups = [ + 'review_requested_count', + 'labeled_count', + 'subscribed_count', + 'referenced_count', + 'closed_count', + # 'milestoned_count', + ] + optional_comments + + optional_group_comments = ['comment'] if include_comments else [] + # y_groups = ['subscribed', 'mentioned', 'labeled', 'review_requested', 'head_ref_force_pushed', + # 'referenced', 'closed', 'merged', 'unlabeled', 'head_ref_deleted', 'milestoned', 'assigned'] + # + optional_group_comments + + x_groups = sorted(list(driver_df[x_axis].unique())) + + grid_array = [] + grid_row = [] + + for index, facet_group in enumerate(sorted(driver_df[facet].unique())): + + facet_data = driver_df.loc[driver_df[facet] == facet_group] + # display(facet_data.sort_values('merged_count', ascending=False).head(50)) + driver_df_mean = facet_data.groupby(['repo_id', 'repo_name', x_axis], as_index=False).mean().round(1) + + # if a record is field in a record is Nan then it is not counted by count() so when it is not + # 2 meaning both rows have a value, there is not enough data + if (driver_df_mean['assigned_count'].count() != 2 or 
driver_df_mean[ + 'review_requested_count'].count() != 2 or driver_df_mean['labeled_count'].count() != 2 or + driver_df_mean['subscribed_count'].count() != 2 or driver_df_mean['mentioned_count'].count() != 2 or + driver_df_mean['referenced_count'].count() != 2 or + driver_df_mean['closed_count'].count() != 2 or driver_df_mean[ + 'head_ref_force_pushed_count'].count() != 2 or driver_df_mean['merged_count'].count() != 2 or + driver_df_mean['milestoned_count'].count() != 2 or driver_df_mean['unlabeled_count'].count() != 2 or + driver_df_mean['head_ref_deleted_count'].count() != 2 or + driver_df_mean['comment_count'].count() != 2): + return Response(response="There is not enough data for this repo, in the database you are accessing", mimetype='application/json', status=200) - repo_dict = {repo_id: pr_closed.loc[pr_closed['repo_id'] == repo_id].iloc[0]['repo_name']} + # print(driver_df_mean.to_string()) + # data = {'Y' : y_groups} + # for group in y_groups: + # data[group] = driver_df_mean[group].tolist() + plot_width = 700 + p = figure(y_range=y_groups, plot_height=500, plot_width=plot_width, x_range=x_groups, + title='{}'.format(format(facet_group))) + + for y_group in y_groups: + driver_df_mean['field'] = y_group + source = ColumnDataSource(driver_df_mean) + mapper = LinearColorMapper(palette=colors, low=driver_df_mean[y_group].min(), + high=driver_df_mean[y_group].max()) + + p.rect(y='field', x=x_axis, width=1, height=1, source=source, + line_color=None, fill_color=transform(y_group, mapper)) + # Data label + labels = LabelSet(x=x_axis, y='field', text=y_group, y_offset=-8, + text_font_size="12pt", text_color='black', + source=source, text_align='center') + p.add_layout(labels) - colors = linear_gradient('#f5f5dc', '#fff44f', 150)['hex'] + color_bar = ColorBar(color_mapper=mapper, location=(0, 0), + ticker=BasicTicker(desired_num_ticks=9), + formatter=PrintfTickFormatter(format="%d")) + # p.add_layout(color_bar, 'right') - driver_df = pr_closed.copy() - driver_df[x_axis] = driver_df[x_axis].astype(str) + p.y_range.range_padding = 0.1 + p.ygrid.grid_line_color = None - if facet == 'closed_year' or y_axis == 'closed_year': - driver_df['closed_year'] = driver_df['closed_year'].astype(int).astype(str) - - y_groups = [ - 'review_requested_count', - 'labeled_count', - 'subscribed_count', - 'referenced_count', - 'closed_count', - # 'milestoned_count', - ] + optional_comments - - optional_group_comments = ['comment'] if include_comments else [] - # y_groups = ['subscribed', 'mentioned', 'labeled', 'review_requested', 'head_ref_force_pushed', - # 'referenced', 'closed', 'merged', 'unlabeled', 'head_ref_deleted', 'milestoned', 'assigned'] - # + optional_group_comments - - x_groups = sorted(list(driver_df[x_axis].unique())) - - grid_array = [] - grid_row = [] - - for index, facet_group in enumerate(sorted(driver_df[facet].unique())): - - facet_data = driver_df.loc[driver_df[facet] == facet_group] - # display(facet_data.sort_values('merged_count', ascending=False).head(50)) - driver_df_mean = facet_data.groupby(['repo_id', 'repo_name', x_axis], as_index=False).mean().round(1) - - # if a record is field in a record is Nan then it is not counted by count() so when it is not - # 2 meaning both rows have a value, there is not enough data - if (driver_df_mean['assigned_count'].count() != 2 or driver_df_mean[ - 'review_requested_count'].count() != 2 or driver_df_mean['labeled_count'].count() != 2 or - driver_df_mean['subscribed_count'].count() != 2 or driver_df_mean['mentioned_count'].count() != 2 or - 
driver_df_mean['referenced_count'].count() != 2 or - driver_df_mean['closed_count'].count() != 2 or driver_df_mean[ - 'head_ref_force_pushed_count'].count() != 2 or driver_df_mean['merged_count'].count() != 2 or - driver_df_mean['milestoned_count'].count() != 2 or driver_df_mean['unlabeled_count'].count() != 2 or - driver_df_mean['head_ref_deleted_count'].count() != 2 or - driver_df_mean['comment_count'].count() != 2): - return Response(response="There is not enough data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) - - # print(driver_df_mean.to_string()) - # data = {'Y' : y_groups} - # for group in y_groups: - # data[group] = driver_df_mean[group].tolist() - plot_width = 700 - p = figure(y_range=y_groups, plot_height=500, plot_width=plot_width, x_range=x_groups, - title='{}'.format(format(facet_group))) - - for y_group in y_groups: - driver_df_mean['field'] = y_group - source = ColumnDataSource(driver_df_mean) - mapper = LinearColorMapper(palette=colors, low=driver_df_mean[y_group].min(), - high=driver_df_mean[y_group].max()) - - p.rect(y='field', x=x_axis, width=1, height=1, source=source, - line_color=None, fill_color=transform(y_group, mapper)) - # Data label - labels = LabelSet(x=x_axis, y='field', text=y_group, y_offset=-8, - text_font_size="12pt", text_color='black', - source=source, text_align='center') - p.add_layout(labels) - - color_bar = ColorBar(color_mapper=mapper, location=(0, 0), - ticker=BasicTicker(desired_num_ticks=9), - formatter=PrintfTickFormatter(format="%d")) - # p.add_layout(color_bar, 'right') - - p.y_range.range_padding = 0.1 - p.ygrid.grid_line_color = None - - p.legend.location = "bottom_right" - p.axis.minor_tick_line_color = None - p.outline_line_color = None - - p.xaxis.axis_label = 'Year Closed' - p.yaxis.axis_label = 'Event Type' - - p.title.align = "center" - p.title.text_font_size = "15px" - - p.xaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "16px" - - p.yaxis.axis_label_text_font_size = "16px" - p.yaxis.major_label_text_font_size = "16px" - - grid_row.append(p) - if index % columns == columns - 1: - grid_array.append(grid_row) - grid_row = [] - grid = gridplot(grid_array) - - # create caption plot - caption_plot = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the average count of several different event types for " \ - "closed pull requests per year. It spilits the pull requests into two categories, " \ - "Merged / Accepted, and Not Merged / Rejected, so the similarities and differences are clear." 
- - caption_plot.add_layout(Label(x=0, y=380, x_units='screen', y_units='screen', text='{}'.format(caption), - text_font='times', text_font_size='15pt', render_mode='css')) - - # caption_plot.outline_line_color = None - caption_plot.toolbar_location = None - - # create title plot - title_plot = figure(width=plot_width, height=50, margin=(0, 0, 0, 0)) - title = '{}: Average Pull Request Event Types for {} Pull Requests'.format(repo_dict[repo_id], description) - - title_plot.add_layout(Label(x=550, y=0, x_units='screen', y_units='screen', text='{}'.format(title), - text_font='times', text_font_size='17px', - text_font_style='bold', render_mode='css')) - - # title_plot.outline_line_color = None - title_plot.toolbar_location = None - - layout = column([title_plot, grid, caption_plot], sizing_mode='scale_width') - - if return_json == "true": - var = Response(response=json.dumps(json_item(layout, "average_PR_events_for_closed_PRs")), - mimetype='application/json', - status=200) - - var.headers["Access-Control-Allow-Orgin"] = "*" - - return var - - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - filename = export_png(layout, timeout=181) # , webdriver=selenium.webdriver.firefox.webdriver) - - return send_file(filename) - - @server.app.route('/{}/pull_request_reports/Average_PR_duration/'.format(AUGUR_API_VERSION), methods=["GET"]) - def Average_PR_duration(): + p.legend.location = "bottom_right" + p.axis.minor_tick_line_color = None + p.outline_line_color = None + + p.xaxis.axis_label = 'Year Closed' + p.yaxis.axis_label = 'Event Type' - repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() + p.title.align = "center" + p.title.text_font_size = "15px" - if error: - return Response(response=error["message"], - mimetype='application/json', - status=error["status_code"]) - - group_by = str(request.args.get('group_by', "month")) - return_json = request.args.get('return_json', "false") - remove_outliers = str(request.args.get('remove_outliers', "true")) + p.xaxis.axis_label_text_font_size = "16px" + p.xaxis.major_label_text_font_size = "16px" - x_axis = 'repo_name' - group_by = 'merged_flag' - y_axis = 'closed_yearmonth' - description = "All Closed" - heat_field = 'pr_duration_days' - columns = 2 + p.yaxis.axis_label_text_font_size = "16px" + p.yaxis.major_label_text_font_size = "16px" - df_type = get_df_tuple_locations() + grid_row.append(p) + if index % columns == columns - 1: + grid_array.append(grid_row) + grid_row = [] + grid = gridplot(grid_array) - df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) + # create caption plot + caption_plot = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the average count of several different event types for " \ + "closed pull requests per year. It spilits the pull requests into two categories, " \ + "Merged / Accepted, and Not Merged / Rejected, so the similarities and differences are clear." 
- pr_closed = df_tuple[df_type["pr_closed"]] - needed_columns = ['repo_id', y_axis, group_by, x_axis, 'pr_closed_at', 'pr_created_at'] - pr_closed = filter_data(pr_closed, needed_columns) + caption_plot.add_layout(Label(x=0, y=380, x_units='screen', y_units='screen', text='{}'.format(caption), + text_font='times', text_font_size='15pt', render_mode='css')) - if len(pr_closed) == 0: - return Response(response="There is no data for this repo, in the database you are accessing", - mimetype='application/json', - status=200) + # caption_plot.outline_line_color = None + caption_plot.toolbar_location = None - pr_duration_frame = pr_closed.assign(pr_duration=(pr_closed['pr_closed_at'] - pr_closed['pr_created_at'])) - pr_duration_frame = pr_duration_frame.assign( - pr_duration_days=(pr_duration_frame['pr_duration'] / datetime.timedelta(minutes=1)) / 60 / 24) + # create title plot + title_plot = figure(width=plot_width, height=50, margin=(0, 0, 0, 0)) + title = '{}: Average Pull Request Event Types for {} Pull Requests'.format(repo_dict[repo_id], description) - repo_dict = {repo_id: pr_duration_frame.loc[pr_duration_frame['repo_id'] == repo_id].iloc[0]['repo_name']} + title_plot.add_layout(Label(x=550, y=0, x_units='screen', y_units='screen', text='{}'.format(title), + text_font='times', text_font_size='17px', + text_font_style='bold', render_mode='css')) - red_green_gradient = linear_gradient('#0080FF', '#DC143C', 150)['hex'] # 32CD32 + # title_plot.outline_line_color = None + title_plot.toolbar_location = None - driver_df = pr_duration_frame.copy() + layout = column([title_plot, grid, caption_plot], sizing_mode='scale_width') - driver_df[y_axis] = driver_df[y_axis].astype(str) + if return_json == "true": + var = Response(response=json.dumps(json_item(layout, "average_PR_events_for_closed_PRs")), + mimetype='application/json', + status=200) - # add new group by + xaxis column - driver_df['grouped_x'] = driver_df[x_axis] + ' - ' + driver_df[group_by] + var.headers["Access-Control-Allow-Orgin"] = "*" - driver_df_mean = driver_df.groupby(['grouped_x', y_axis], as_index=False).mean() + return var - colors = red_green_gradient - y_groups = driver_df_mean[y_axis].unique() - x_groups = sorted(driver_df[x_axis].unique()) - grouped_x_groups = sorted(driver_df_mean['grouped_x'].unique()) + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + filename = export_png(layout, timeout=181) # , webdriver=selenium.webdriver.firefox.webdriver) - # defualt outliers removed to 0 - outliers_removed = 0 + return send_file(filename) - if remove_outliers == "true": - driver_df_mean, outliers_removed, outlier_cutoff = remove_outliers_by_standard_deviation(driver_df_mean, - heat_field) +@app.route('/{}/pull_request_reports/Average_PR_duration/'.format(AUGUR_API_VERSION), methods=["GET"]) +def Average_PR_duration(): - values = driver_df_mean[heat_field].values.tolist() + repo_id, start_date, end_date, error = get_repo_id_start_date_and_end_date() - heat_max = max(values) * 1.02 + if error: + return Response(response=error["message"], + mimetype='application/json', + status=error["status_code"]) + + group_by = str(request.args.get('group_by', "month")) + return_json = request.args.get('return_json', "false") + remove_outliers = str(request.args.get('remove_outliers', "true")) - mapper = LinearColorMapper(palette=colors, low=driver_df_mean[heat_field].min(), - high=heat_max) # driver_df_mean[heat_field].max()) + x_axis = 'repo_name' + group_by = 'merged_flag' + 
y_axis = 'closed_yearmonth' + description = "All Closed" + heat_field = 'pr_duration_days' + columns = 2 - source = ColumnDataSource(driver_df_mean) - title_beginning = repo_dict[repo_id] + ':' - plot_width = 1100 - p = figure(plot_width=plot_width, plot_height=300, - title="{} Mean Duration (Days) {} Pull Requests".format(title_beginning, description), - y_range=grouped_x_groups[::-1], x_range=y_groups, - toolbar_location=None, tools="") # , x_axis_location="above") + df_type = get_df_tuple_locations() - for x_group in x_groups: - outliers = driver_df_mean.loc[ - (driver_df_mean[heat_field] > heat_max) & (driver_df_mean['grouped_x'].str.contains(x_group))] + df_tuple = pull_request_data_collection(repo_id=repo_id, start_date=start_date, end_date=end_date) - if outliers_removed > 0: - p.add_layout(Title( - text="** Outliers capped at {} days: {} outlier(s) for {} were capped at {} **".format( - outlier_cutoff, outliers_removed, x_group, outlier_cutoff), align="center"), "below") + pr_closed = df_tuple[df_type["pr_closed"]] + needed_columns = ['repo_id', y_axis, group_by, x_axis, 'pr_closed_at', 'pr_created_at'] + pr_closed = filter_data(pr_closed, needed_columns) - p.rect(x=y_axis, y='grouped_x', width=1, height=1, source=source, - line_color=None, fill_color=transform(heat_field, mapper)) + if len(pr_closed) == 0: + return Response(response="There is no data for this repo, in the database you are accessing", + mimetype='application/json', + status=200) - color_bar = ColorBar(color_mapper=mapper, location=(0, 0), - ticker=BasicTicker(desired_num_ticks=9), - formatter=PrintfTickFormatter(format="%d")) + pr_duration_frame = pr_closed.assign(pr_duration=(pr_closed['pr_closed_at'] - pr_closed['pr_created_at'])) + pr_duration_frame = pr_duration_frame.assign( + pr_duration_days=(pr_duration_frame['pr_duration'] / datetime.timedelta(minutes=1)) / 60 / 24) - p.add_layout(color_bar, 'right') + repo_dict = {repo_id: pr_duration_frame.loc[pr_duration_frame['repo_id'] == repo_id].iloc[0]['repo_name']} - p.title.align = "center" - p.title.text_font_size = "16px" + red_green_gradient = linear_gradient('#0080FF', '#DC143C', 150)['hex'] # 32CD32 - p.axis.axis_line_color = None - p.axis.major_tick_line_color = None - p.axis.major_label_text_font_size = "11pt" - p.axis.major_label_standoff = 0 - p.xaxis.major_label_orientation = 1.0 - p.xaxis.axis_label = 'Month Closed' if y_axis[0:6] == 'closed' else 'Date Created' if y_axis[ - 0:7] == 'created' else 'Repository' if y_axis == 'repo_name' else '' - # p.yaxis.axis_label = 'Merged Status' + driver_df = pr_duration_frame.copy() - p.title.text_font_size = "16px" + driver_df[y_axis] = driver_df[y_axis].astype(str) - p.xaxis.axis_label_text_font_size = "16px" - p.xaxis.major_label_text_font_size = "14px" + # add new group by + xaxis column + driver_df['grouped_x'] = driver_df[x_axis] + ' - ' + driver_df[group_by] + + driver_df_mean = driver_df.groupby(['grouped_x', y_axis], as_index=False).mean() + + colors = red_green_gradient + y_groups = driver_df_mean[y_axis].unique() + x_groups = sorted(driver_df[x_axis].unique()) + grouped_x_groups = sorted(driver_df_mean['grouped_x'].unique()) + + # defualt outliers removed to 0 + outliers_removed = 0 + + if remove_outliers == "true": + driver_df_mean, outliers_removed, outlier_cutoff = remove_outliers_by_standard_deviation(driver_df_mean, + heat_field) + + values = driver_df_mean[heat_field].values.tolist() + + heat_max = max(values) * 1.02 + + mapper = LinearColorMapper(palette=colors, 
low=driver_df_mean[heat_field].min(), + high=heat_max) # driver_df_mean[heat_field].max()) + + source = ColumnDataSource(driver_df_mean) + title_beginning = repo_dict[repo_id] + ':' + plot_width = 1100 + p = figure(plot_width=plot_width, plot_height=300, + title="{} Mean Duration (Days) {} Pull Requests".format(title_beginning, description), + y_range=grouped_x_groups[::-1], x_range=y_groups, + toolbar_location=None, tools="") # , x_axis_location="above") + + for x_group in x_groups: + outliers = driver_df_mean.loc[ + (driver_df_mean[heat_field] > heat_max) & (driver_df_mean['grouped_x'].str.contains(x_group))] + + if outliers_removed > 0: + p.add_layout(Title( + text="** Outliers capped at {} days: {} outlier(s) for {} were capped at {} **".format( + outlier_cutoff, outliers_removed, x_group, outlier_cutoff), align="center"), "below") + + p.rect(x=y_axis, y='grouped_x', width=1, height=1, source=source, + line_color=None, fill_color=transform(heat_field, mapper)) + + color_bar = ColorBar(color_mapper=mapper, location=(0, 0), + ticker=BasicTicker(desired_num_ticks=9), + formatter=PrintfTickFormatter(format="%d")) + + p.add_layout(color_bar, 'right') + + p.title.align = "center" + p.title.text_font_size = "16px" + + p.axis.axis_line_color = None + p.axis.major_tick_line_color = None + p.axis.major_label_text_font_size = "11pt" + p.axis.major_label_standoff = 0 + p.xaxis.major_label_orientation = 1.0 + p.xaxis.axis_label = 'Month Closed' if y_axis[0:6] == 'closed' else 'Date Created' if y_axis[ + 0:7] == 'created' else 'Repository' if y_axis == 'repo_name' else '' + # p.yaxis.axis_label = 'Merged Status' + + p.title.text_font_size = "16px" + + p.xaxis.axis_label_text_font_size = "16px" + p.xaxis.major_label_text_font_size = "14px" - p.yaxis.major_label_text_font_size = "15px" + p.yaxis.major_label_text_font_size = "15px" - plot = p + plot = p - p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) - caption = "This graph shows the average duration of all closed pull requests. " \ - "Red represents a slow response relative to the others, while blue a light blue " \ - "represents a fast response relative to the others. Blank cells represents months " \ - "without pull requests." - p = add_caption_to_plot(p, caption) - caption_plot = p + p = figure(width=plot_width, height=200, margin=(0, 0, 0, 0)) + caption = "This graph shows the average duration of all closed pull requests. " \ + "Red represents a slow response relative to the others, while blue a light blue " \ + "represents a fast response relative to the others. Blank cells represents months " \ + "without pull requests." 
+ p = add_caption_to_plot(p, caption) + caption_plot = p - grid = gridplot([[plot], [caption_plot]]) + grid = gridplot([[plot], [caption_plot]]) - if return_json == "true": - var = Response(response=json.dumps(json_item(grid, "Average_PR_duration")), - mimetype='application/json', - status=200) + if return_json == "true": + var = Response(response=json.dumps(json_item(grid, "Average_PR_duration")), + mimetype='application/json', + status=200) - var.headers["Access-Control-Allow-Orgin"] = "*" + var.headers["Access-Control-Allow-Orgin"] = "*" - return var + return var - # opts = FirefoxOptions() - # opts.add_argument("--headless") - # driver = webdriver.Firefox(firefox_options=opts) - # newt = get_screenshot_as_png(grid, timeout=180, webdriver=selenium.webdriver.firefox.webdriver) - # filename = export_png(grid, timeout=180, webdriver=selenium.webdriver.firefox.webdriver) - filename = export_png(grid, timeout=180) + # opts = FirefoxOptions() + # opts.add_argument("--headless") + # driver = webdriver.Firefox(firefox_options=opts) + # newt = get_screenshot_as_png(grid, timeout=180, webdriver=selenium.webdriver.firefox.webdriver) + # filename = export_png(grid, timeout=180, webdriver=selenium.webdriver.firefox.webdriver) + filename = export_png(grid, timeout=180) - # return sendfile(newt) - return send_file(filename) + # return sendfile(newt) + return send_file(filename) diff --git a/augur/api/routes/user.py b/augur/api/routes/user.py index 3362d61211..ff2e4a7955 100644 --- a/augur/api/routes/user.py +++ b/augur/api/routes/user.py @@ -26,11 +26,11 @@ from augur.application.db.models import User, UserRepo, UserGroup, UserSessionToken, ClientApplication, RefreshToken from augur.application.config import get_development_flag from augur.tasks.init.redis_connection import redis_connection as redis +from ..server import app, engine logger = logging.getLogger(__name__) development = get_development_flag() -from augur.application.db.engine import DatabaseEngine -Session = sessionmaker(bind=DatabaseEngine().engine) +Session = sessionmaker(bind=engine) from augur.api.routes import AUGUR_API_VERSION @@ -72,471 +72,470 @@ def generate_upgrade_request(): return response, 426 -def create_routes(server): - @server.app.route(f"/{AUGUR_API_VERSION}/user/validate", methods=['POST']) - def validate_user(): - if not development and not request.is_secure: - return generate_upgrade_request() +@app.route(f"/{AUGUR_API_VERSION}/user/validate", methods=['POST']) +def validate_user(): + if not development and not request.is_secure: + return generate_upgrade_request() - - username = request.args.get("username") - password = request.args.get("password") - if username is None or password is None: - # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400 - return jsonify({"status": "Missing argument"}), 400 + + username = request.args.get("username") + password = request.args.get("password") + if username is None or password is None: + # https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400 + return jsonify({"status": "Missing argument"}), 400 - session = Session() - user = session.query(User).filter(User.login_name == username).first() - session.close() + session = Session() + user = session.query(User).filter(User.login_name == username).first() + session.close() + + if user is None: + return jsonify({"status": "Invalid username"}) + + checkPassword = check_password_hash(user.login_hashword, password) + if checkPassword == False: + return jsonify({"status": "Invalid password"}) - if user is None: - return 
jsonify({"status": "Invalid username"}) - checkPassword = check_password_hash(user.login_hashword, password) - if checkPassword == False: - return jsonify({"status": "Invalid password"}) + login_user(user) + return jsonify({"status": "Validated"}) - login_user(user) - return jsonify({"status": "Validated"}) +@app.route(f"/{AUGUR_API_VERSION}/user/logout", methods=['POST']) +@login_required +def logout_user_func(): + if not development and not request.is_secure: + return generate_upgrade_request() + if logout_user(): + return jsonify({"status": "Logged out"}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/logout", methods=['POST']) - @login_required - def logout_user_func(): - if not development and not request.is_secure: - return generate_upgrade_request() + return jsonify({"status": "Error when logging out"}) - if logout_user(): - return jsonify({"status": "Logged out"}) - return jsonify({"status": "Error when logging out"}) +@app.route(f"/{AUGUR_API_VERSION}/user/authorize", methods=['POST', 'GET']) +@login_required +def user_authorize(): + code = secrets.token_hex() + username = current_user.login_name + redis.set(code, username, ex=300) + + return jsonify({"status": "Validated", "code": code}) + +@app.route(f"/{AUGUR_API_VERSION}/user/session/generate", methods=['POST']) +@api_key_required +def generate_session(application): + code = request.args.get("code") + if not code: + return jsonify({"status": "Missing argument: code"}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/authorize", methods=['POST', 'GET']) - @login_required - def user_authorize(): - code = secrets.token_hex() - username = current_user.login_name - - redis.set(code, username, ex=300) - - return jsonify({"status": "Validated", "code": code}) - - @server.app.route(f"/{AUGUR_API_VERSION}/user/session/generate", methods=['POST']) - @api_key_required - def generate_session(application): - code = request.args.get("code") - if not code: - return jsonify({"status": "Missing argument: code"}) - - if request.args.get("grant_type") != "code": - return jsonify({"status": "Invalid grant type"}) + if request.args.get("grant_type") != "code": + return jsonify({"status": "Invalid grant type"}) - username = redis.get(code) - redis.delete(code) - if not username: - return jsonify({"status": "Invalid authorization code"}) + username = redis.get(code) + redis.delete(code) + if not username: + return jsonify({"status": "Invalid authorization code"}) - with DatabaseSession(logger) as session: + with DatabaseSession(logger) as session: - user = User.get_user(session, username) - if not user: - return jsonify({"status": "Invalid user"}) + user = User.get_user(session, username) + if not user: + return jsonify({"status": "Invalid user"}) - seconds_to_expire = 86400 + seconds_to_expire = 86400 - existing_session = session.query(UserSessionToken).filter(UserSessionToken.user_id == user.user_id, UserSessionToken.application_id == application.id).first() - if existing_session: - existing_session.delete_refresh_tokens(session) + existing_session = session.query(UserSessionToken).filter(UserSessionToken.user_id == user.user_id, UserSessionToken.application_id == application.id).first() + if existing_session: + existing_session.delete_refresh_tokens(session) - session.delete(existing_session) - session.commit() + session.delete(existing_session) + session.commit() - user_session_token = UserSessionToken.create(session, user.user_id, application.id, seconds_to_expire).token - refresh_token = RefreshToken.create(session, user_session_token) + 
user_session_token = UserSessionToken.create(session, user.user_id, application.id, seconds_to_expire).token + refresh_token = RefreshToken.create(session, user_session_token) - response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) - response.headers["Cache-Control"] = "no-store" + response = jsonify({"status": "Validated", "username": username, "access_token": user_session_token, "refresh_token" : refresh_token.id, "token_type": "Bearer", "expires": seconds_to_expire}) + response.headers["Cache-Control"] = "no-store" - return response + return response + +@app.route(f"/{AUGUR_API_VERSION}/user/session/refresh", methods=["GET", "POST"]) +@api_key_required +def refresh_session(application): + refresh_token_str = request.args.get("refresh_token") + + if not refresh_token_str: + return jsonify({"status": "Invalid refresh token"}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/session/refresh", methods=["GET", "POST"]) - @api_key_required - def refresh_session(application): - refresh_token_str = request.args.get("refresh_token") + if request.args.get("grant_type") != "refresh_token": + return jsonify({"status": "Invalid grant type"}) - if not refresh_token_str: + with DatabaseSession(logger) as session: + + refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() + if not refresh_token: return jsonify({"status": "Invalid refresh token"}) - - if request.args.get("grant_type") != "refresh_token": - return jsonify({"status": "Invalid grant type"}) - with DatabaseSession(logger) as session: + if refresh_token.user_session.application == application: + return jsonify({"status": "Applications do not match"}) - refresh_token = session.query(RefreshToken).filter(RefreshToken.id == refresh_token_str).first() - if not refresh_token: - return jsonify({"status": "Invalid refresh token"}) + user_session = refresh_token.user_session + user = user_session.user - if refresh_token.user_session.application == application: - return jsonify({"status": "Applications do not match"}) + new_user_session_token = UserSessionToken.create(session, user.user_id, user_session.application.id).token + new_refresh_token_id = RefreshToken.create(session, new_user_session_token).id + + session.delete(refresh_token) + session.delete(user_session) + session.commit() - user_session = refresh_token.user_session - user = user_session.user + return jsonify({"status": "Validated", "refresh_token": new_refresh_token_id, "access_token": new_user_session_token, "expires": 86400}) - new_user_session_token = UserSessionToken.create(session, user.user_id, user_session.application.id).token - new_refresh_token_id = RefreshToken.create(session, new_user_session_token).id - - session.delete(refresh_token) - session.delete(user_session) - session.commit() - return jsonify({"status": "Validated", "refresh_token": new_refresh_token_id, "access_token": new_user_session_token, "expires": 86400}) +@app.route(f"/{AUGUR_API_VERSION}/user/query", methods=['POST']) +def query_user(): + if not development and not request.is_secure: + return generate_upgrade_request() - - @server.app.route(f"/{AUGUR_API_VERSION}/user/query", methods=['POST']) - def query_user(): - if not development and not request.is_secure: - return generate_upgrade_request() + username = request.args.get("username") + if username is None: + return jsonify({"status": "Missing argument"}), 400 - username = 
request.args.get("username") - if username is None: - return jsonify({"status": "Missing argument"}), 400 + if not User.exists(username): + return jsonify({"status": "Invalid username"}) - if not User.exists(username): - return jsonify({"status": "Invalid username"}) + return jsonify({"status": True}) - return jsonify({"status": True}) +@app.route(f"/{AUGUR_API_VERSION}/user/create", methods=['GET', 'POST']) +def create_user(): + if not development and not request.is_secure: + return generate_upgrade_request() - @server.app.route(f"/{AUGUR_API_VERSION}/user/create", methods=['GET', 'POST']) - def create_user(): - if not development and not request.is_secure: - return generate_upgrade_request() + username = request.args.get("username") + password = request.args.get("password") + email = request.args.get("email") + first_name = request.args.get("first_name") + last_name = request.args.get("last_name") + admin = request.args.get("create_admin") or False - username = request.args.get("username") - password = request.args.get("password") - email = request.args.get("email") - first_name = request.args.get("first_name") - last_name = request.args.get("last_name") - admin = request.args.get("create_admin") or False + result = User.create_user(username, password, email, first_name, last_name, admin) - result = User.create_user(username, password, email, first_name, last_name, admin) + return jsonify(result[1]) - return jsonify(result[1]) - - @server.app.route(f"/{AUGUR_API_VERSION}/user/remove", methods=['POST', 'DELETE']) - @login_required - def delete_user(): - if not development and not request.is_secure: - return generate_upgrade_request() - - status = current_user.delete() - return jsonify(status) +@app.route(f"/{AUGUR_API_VERSION}/user/remove", methods=['POST', 'DELETE']) +@login_required +def delete_user(): + if not development and not request.is_secure: + return generate_upgrade_request() + status = current_user.delete() + return jsonify(status) - @server.app.route(f"/{AUGUR_API_VERSION}/user/update", methods=['POST']) - @login_required - def update_user(): - if not development and not request.is_secure: - return generate_upgrade_request() - email = request.args.get("email") - new_login_name = request.args.get("new_username") - new_password = request.args.get("new_password") +@app.route(f"/{AUGUR_API_VERSION}/user/update", methods=['POST']) +@login_required +def update_user(): + if not development and not request.is_secure: + return generate_upgrade_request() - if email is not None: - existing_user = session.query(User).filter(User.email == email).one() - if existing_user is not None: - session = Session() - return jsonify({"status": "Already an account with this email"}) + email = request.args.get("email") + new_login_name = request.args.get("new_username") + new_password = request.args.get("new_password") - current_user.email = email - session.commit() + if email is not None: + existing_user = session.query(User).filter(User.email == email).one() + if existing_user is not None: session = Session() - return jsonify({"status": "Email Updated"}) + return jsonify({"status": "Already an account with this email"}) - if new_password is not None: - current_user.login_hashword = generate_password_hash(new_password) - session.commit() - session = Session() - return jsonify({"status": "Password Updated"}) + current_user.email = email + session.commit() + session = Session() + return jsonify({"status": "Email Updated"}) - if new_login_name is not None: - existing_user = 
session.query(User).filter(User.login_name == new_login_name).one() - if existing_user is not None: - return jsonify({"status": "Username already taken"}) + if new_password is not None: + current_user.login_hashword = generate_password_hash(new_password) + session.commit() + session = Session() + return jsonify({"status": "Password Updated"}) - current_user.login_name = new_login_name - session.commit() - session = Session() - return jsonify({"status": "Username Updated"}) + if new_login_name is not None: + existing_user = session.query(User).filter(User.login_name == new_login_name).one() + if existing_user is not None: + return jsonify({"status": "Username already taken"}) - return jsonify({"status": "Missing argument"}), 400 + current_user.login_name = new_login_name + session.commit() + session = Session() + return jsonify({"status": "Username Updated"}) + return jsonify({"status": "Missing argument"}), 400 - @server.app.route(f"/{AUGUR_API_VERSION}/user/repo/add", methods=['GET', 'POST']) - @login_required - def add_user_repo(): - if not development and not request.is_secure: - return generate_upgrade_request() - repo = request.args.get("repo_url") - group_name = request.args.get("group_name") +@app.route(f"/{AUGUR_API_VERSION}/user/repo/add", methods=['GET', 'POST']) +@login_required +def add_user_repo(): + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.add_repo(group_name, repo) + repo = request.args.get("repo_url") + group_name = request.args.get("group_name") - return jsonify(result[1]) + result = current_user.add_repo(group_name, repo) + return jsonify(result[1]) - @server.app.route(f"/{AUGUR_API_VERSION}/user/group/add", methods=['GET', 'POST']) - @login_required - def add_user_group(): - if not development and not request.is_secure: - return generate_upgrade_request() - group_name = request.args.get("group_name") +@app.route(f"/{AUGUR_API_VERSION}/user/group/add", methods=['GET', 'POST']) +@login_required +def add_user_group(): + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.add_group(group_name) + group_name = request.args.get("group_name") - return jsonify(result[1]) + result = current_user.add_group(group_name) - @server.app.route(f"/{AUGUR_API_VERSION}/user/group/remove", methods=['GET', 'POST']) - @login_required - def remove_user_group(): - if not development and not request.is_secure: - return generate_upgrade_request() + return jsonify(result[1]) - group_name = request.args.get("group_name") +@app.route(f"/{AUGUR_API_VERSION}/user/group/remove", methods=['GET', 'POST']) +@login_required +def remove_user_group(): + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.remove_group(group_name) + group_name = request.args.get("group_name") - return jsonify(result[1]) + result = current_user.remove_group(group_name) + return jsonify(result[1]) - @server.app.route(f"/{AUGUR_API_VERSION}/user/org/add", methods=['GET', 'POST']) - @login_required - def add_user_org(): - if not development and not request.is_secure: - return generate_upgrade_request() - org = request.args.get("org_url") - group_name = request.args.get("group_name") +@app.route(f"/{AUGUR_API_VERSION}/user/org/add", methods=['GET', 'POST']) +@login_required +def add_user_org(): + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.add_org(group_name, org) + org = request.args.get("org_url") + 
group_name = request.args.get("group_name") - return jsonify(result[1]) + result = current_user.add_org(group_name, org) + return jsonify(result[1]) - @server.app.route(f"/{AUGUR_API_VERSION}/user/repo/remove", methods=['GET', 'POST']) - @login_required - def remove_user_repo(): - if not development and not request.is_secure: - return generate_upgrade_request() +@app.route(f"/{AUGUR_API_VERSION}/user/repo/remove", methods=['GET', 'POST']) +@login_required +def remove_user_repo(): + if not development and not request.is_secure: + return generate_upgrade_request() - group_name = request.args.get("group_name") - try: - repo_id = int(request.args.get("repo_id")) - except TypeError: - return {"status": "Repo id must be and integer"} + group_name = request.args.get("group_name") - result = current_user.remove_repo(group_name, repo_id) + try: + repo_id = int(request.args.get("repo_id")) + except TypeError: + return {"status": "Repo id must be and integer"} - return jsonify(result[1]) + result = current_user.remove_repo(group_name, repo_id) - @server.app.route(f"/{AUGUR_API_VERSION}/user/group/repos/", methods=['GET', 'POST']) - @login_required - def group_repos(): - """Select repos from a user group by name + return jsonify(result[1]) - Arguments - ---------- - group_name : str - The name of the group to select - page : int = 0 -> [>= 0] - The page offset to use for pagination (optional) - page_size : int = 25 -> [> 0] - The number of result per page (optional) - sort : str - The name of the column to sort the data by (optional) - direction : str = "ASC" -> ["ASC" | "DESC"] - The direction to be used for sorting (optional) +@app.route(f"/{AUGUR_API_VERSION}/user/group/repos/", methods=['GET', 'POST']) +@login_required +def group_repos(): + """Select repos from a user group by name - Returns - ------- - list - A list of dictionaries containing repos which match the given arguments - """ + Arguments + ---------- + group_name : str + The name of the group to select + page : int = 0 -> [>= 0] + The page offset to use for pagination (optional) + page_size : int = 25 -> [> 0] + The number of result per page (optional) + sort : str + The name of the column to sort the data by (optional) + direction : str = "ASC" -> ["ASC" | "DESC"] + The direction to be used for sorting (optional) - if not development and not request.is_secure: - return generate_upgrade_request() + Returns + ------- + list + A list of dictionaries containing repos which match the given arguments + """ - group_name = request.args.get("group_name") - page = request.args.get("page") or 0 - page_size = request.args.get("page_size") or 25 - sort = request.args.get("sort") or "repo_id" - direction = request.args.get("direction") or "ASC" + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.get_group_repos(group_name, page, page_size, sort, direction) + group_name = request.args.get("group_name") + page = request.args.get("page") or 0 + page_size = request.args.get("page_size") or 25 + sort = request.args.get("sort") or "repo_id" + direction = request.args.get("direction") or "ASC" + result = current_user.get_group_repos(group_name, page, page_size, sort, direction) - result_dict = result[1] - if result[0] is not None: - - for repo in result[0]: - repo["base64_url"] = str(repo["base64_url"].decode()) - result_dict.update({"repos": result[0]}) + result_dict = result[1] + if result[0] is not None: + + for repo in result[0]: + repo["base64_url"] = str(repo["base64_url"].decode()) - return 
jsonify(result_dict) + result_dict.update({"repos": result[0]}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/group/repos/count", methods=['GET', 'POST']) - @login_required - def group_repo_count(): - """Count repos from a user group by name + return jsonify(result_dict) - Arguments - ---------- - username : str - The username of the user making the request - group_name : str - The name of the group to select +@app.route(f"/{AUGUR_API_VERSION}/user/group/repos/count", methods=['GET', 'POST']) +@login_required +def group_repo_count(): + """Count repos from a user group by name - Returns - ------- - int - A count of the repos in the given user group - """ + Arguments + ---------- + username : str + The username of the user making the request + group_name : str + The name of the group to select - if not development and not request.is_secure: - return generate_upgrade_request() + Returns + ------- + int + A count of the repos in the given user group + """ - group_name = request.args.get("group_name") + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.get_group_repo_count(group_name) + group_name = request.args.get("group_name") - result_dict = result[1] - if result[0] is not None: - result_dict.update({"repo_count": result[0]}) + result = current_user.get_group_repo_count(group_name) - return jsonify(result_dict) + result_dict = result[1] + if result[0] is not None: + result_dict.update({"repo_count": result[0]}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/groups/names", methods=['GET', 'POST']) - @login_required - def get_user_groups(): - """Get a list of user groups by username + return jsonify(result_dict) - Arguments - ---------- - username : str - The username of the user making the request +@app.route(f"/{AUGUR_API_VERSION}/user/groups/names", methods=['GET', 'POST']) +@login_required +def get_user_groups(): + """Get a list of user groups by username - Returns - ------- - list - A list of group names associated with the given username - """ + Arguments + ---------- + username : str + The username of the user making the request - if not development and not request.is_secure: - return generate_upgrade_request() + Returns + ------- + list + A list of group names associated with the given username + """ - result = current_user.get_group_names() + if not development and not request.is_secure: + return generate_upgrade_request() - return jsonify({"status": "success", "group_names": result[0]}) + result = current_user.get_group_names() - @server.app.route(f"/{AUGUR_API_VERSION}/user/groups/repos/", methods=['GET', 'POST']) - @login_required - def get_user_groups_and_repos(): - """Get a list of user groups and their repos""" + return jsonify({"status": "success", "group_names": result[0]}) - if not development and not request.is_secure: - return generate_upgrade_request() +@app.route(f"/{AUGUR_API_VERSION}/user/groups/repos/", methods=['GET', 'POST']) +@login_required +def get_user_groups_and_repos(): + """Get a list of user groups and their repos""" - columns = request.args.get("columns") - if not columns: - return {"status": "Missing argument columns"} + if not development and not request.is_secure: + return generate_upgrade_request() - # split list by , and remove whitespaces from edges + columns = request.args.get("columns") + if not columns: + return {"status": "Missing argument columns"} - valid_columns = [] - columns = columns.split(",") - for column in columns: + # split list by , and remove whitespaces from edges - if 
column.isspace() or column == "": - continue + valid_columns = [] + columns = columns.split(",") + for column in columns: - valid_columns.append(column.strip()) + if column.isspace() or column == "": + continue - print(valid_columns) + valid_columns.append(column.strip()) + print(valid_columns) - data = [] - groups = current_user.groups - for group in groups: - repos = [repo.repo for repo in group.repos] + data = [] + groups = current_user.groups + for group in groups: - group_repo_dicts = [] - for repo in repos: + repos = [repo.repo for repo in group.repos] - repo_dict = {} - for column in valid_columns: - try: - repo_dict[column] = getattr(repo, column) - except AttributeError: - return {"status": f"'{column}' is not a valid repo column"} + group_repo_dicts = [] + for repo in repos: - group_repo_dicts.append(repo_dict) + repo_dict = {} + for column in valid_columns: + try: + repo_dict[column] = getattr(repo, column) + except AttributeError: + return {"status": f"'{column}' is not a valid repo column"} - group_data = {"repos": group_repo_dicts, "favorited": group.favorited} - data.append({group.name: group_data}) + group_repo_dicts.append(repo_dict) - return jsonify({"status": "success", "data": data}) + group_data = {"repos": group_repo_dicts, "favorited": group.favorited} + data.append({group.name: group_data}) + return jsonify({"status": "success", "data": data}) - @server.app.route(f"/{AUGUR_API_VERSION}/user/group/favorite/toggle", methods=['GET', 'POST']) - @login_required - def toggle_user_group_favorite(): - """Toggle the favorite status on a group - Returns - ------- - dict - A dictionairy with key of 'status' that indicates the success or failure of the operation - """ +@app.route(f"/{AUGUR_API_VERSION}/user/group/favorite/toggle", methods=['GET', 'POST']) +@login_required +def toggle_user_group_favorite(): + """Toggle the favorite status on a group - if not development and not request.is_secure: - return generate_upgrade_request() + Returns + ------- + dict + A dictionairy with key of 'status' that indicates the success or failure of the operation + """ - group_name = request.args.get("group_name") + if not development and not request.is_secure: + return generate_upgrade_request() - result = current_user.toggle_group_favorite(group_name) + group_name = request.args.get("group_name") - return jsonify(result[1]) + result = current_user.toggle_group_favorite(group_name) - @server.app.route(f"/{AUGUR_API_VERSION}/user/groups/favorites", methods=['GET', 'POST']) - @login_required - def get_favorite_groups(): - """Get a list of a users favorite groups + return jsonify(result[1]) - Returns - ------- - list - A list of group names - """ +@app.route(f"/{AUGUR_API_VERSION}/user/groups/favorites", methods=['GET', 'POST']) +@login_required +def get_favorite_groups(): + """Get a list of a users favorite groups - if not development and not request.is_secure: - return generate_upgrade_request() + Returns + ------- + list + A list of group names + """ - result = current_user.get_favorite_groups() - groups = result[0] - if groups is None: - return jsonify(result[1]) + if not development and not request.is_secure: + return generate_upgrade_request() + + result = current_user.get_favorite_groups() + groups = result[0] + if groups is None: + return jsonify(result[1]) - group_names = [group.name for group in groups] + group_names = [group.name for group in groups] - return jsonify({"status": "success", "group_names": group_names}) - \ No newline at end of file + return jsonify({"status": "success", 
"group_names": group_names}) + \ No newline at end of file diff --git a/augur/api/routes/util.py b/augur/api/routes/util.py index eac264f264..460f216ef6 100644 --- a/augur/api/routes/util.py +++ b/augur/api/routes/util.py @@ -13,215 +13,215 @@ logger = AugurLogger("augur").get_logger() from augur.api.routes import AUGUR_API_VERSION - -def create_routes(server): - - @server.app.route('/{}/repo-groups'.format(AUGUR_API_VERSION)) - def get_all_repo_groups(): #TODO: make this name automatic - wrapper? - repoGroupsSQL = s.sql.text(""" - SELECT * - FROM repo_groups - ORDER BY rg_name - """) - results = pd.read_sql(repoGroupsSQL, server.engine) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/repos'.format(AUGUR_API_VERSION)) - def get_all_repos(): - - get_all_repos_sql = s.sql.text(""" - SELECT - repo.repo_id, - repo.repo_name, - repo.description, - repo.repo_git AS url, - repo.repo_status, - a.commits_all_time, - b.issues_all_time, - c.pull_requests_all_time, - rg_name, - repo.repo_group_id - FROM - repo - left outer join - (select * from api_get_all_repos_commits ) a on - repo.repo_id = a.repo_id - left outer join - (select * from api_get_all_repos_issues) b - on - repo.repo_id = b.repo_id - left outer join - (select * from api_get_all_repo_prs) c - on repo.repo_id=c.repo_id - JOIN repo_groups ON repo_groups.repo_group_id = repo.repo_group_id - order by repo_name - """) - results = pd.read_sql(get_all_repos_sql, server.engine) - results['url'] = results['url'].apply(lambda datum: datum.split('//')[1]) - - b64_urls = [] - for i in results.index: - b64_urls.append(base64.b64encode((results.at[i, 'url']).encode())) - results['base64_url'] = b64_urls - - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/repo-groups//repos'.format(AUGUR_API_VERSION)) - def get_repos_in_repo_group(repo_group_id): - repos_in_repo_groups_SQL = s.sql.text(""" - SELECT - repo.repo_id, - repo.repo_name, - repo.description, - repo.repo_git AS url, - repo.repo_status, - a.commits_all_time, - b.issues_all_time, - c.pull_requests_all_time - FROM - repo - left outer join - (select repo_id, COUNT ( distinct commits.cmt_commit_hash ) AS commits_all_time from commits group by repo_id ) a on - repo.repo_id = a.repo_id - left outer join - (select repo_id, count ( issues.issue_id) as issues_all_time from issues where issues.pull_request IS NULL group by repo_id) b - on - repo.repo_id = b.repo_id - left outer join - (select * from api_get_all_repo_prs) c - on repo.repo_id=c.repo_id - JOIN repo_groups ON repo_groups.repo_group_id = repo.repo_group_id - WHERE - repo_groups.repo_group_id = :repo_group_id - ORDER BY repo.repo_git +from ..server import app, engine + + +@app.route('/{}/repo-groups'.format(AUGUR_API_VERSION)) +def get_all_repo_groups(): #TODO: make this name automatic - wrapper? 
+ repoGroupsSQL = s.sql.text(""" + SELECT * + FROM repo_groups + ORDER BY rg_name + """) + results = pd.read_sql(repoGroupsSQL, engine) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/repos'.format(AUGUR_API_VERSION)) +def get_all_repos(): + + get_all_repos_sql = s.sql.text(""" + SELECT + repo.repo_id, + repo.repo_name, + repo.description, + repo.repo_git AS url, + repo.repo_status, + a.commits_all_time, + b.issues_all_time, + c.pull_requests_all_time, + rg_name, + repo.repo_group_id + FROM + repo + left outer join + (select * from api_get_all_repos_commits ) a on + repo.repo_id = a.repo_id + left outer join + (select * from api_get_all_repos_issues) b + on + repo.repo_id = b.repo_id + left outer join + (select * from api_get_all_repo_prs) c + on repo.repo_id=c.repo_id + JOIN repo_groups ON repo_groups.repo_group_id = repo.repo_group_id + order by repo_name + """) + results = pd.read_sql(get_all_repos_sql, engine) + results['url'] = results['url'].apply(lambda datum: datum.split('//')[1]) + + b64_urls = [] + for i in results.index: + b64_urls.append(base64.b64encode((results.at[i, 'url']).encode())) + results['base64_url'] = b64_urls + + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/repo-groups//repos'.format(AUGUR_API_VERSION)) +def get_repos_in_repo_group(repo_group_id): + repos_in_repo_groups_SQL = s.sql.text(""" + SELECT + repo.repo_id, + repo.repo_name, + repo.description, + repo.repo_git AS url, + repo.repo_status, + a.commits_all_time, + b.issues_all_time, + c.pull_requests_all_time + FROM + repo + left outer join + (select repo_id, COUNT ( distinct commits.cmt_commit_hash ) AS commits_all_time from commits group by repo_id ) a on + repo.repo_id = a.repo_id + left outer join + (select repo_id, count ( issues.issue_id) as issues_all_time from issues where issues.pull_request IS NULL group by repo_id) b + on + repo.repo_id = b.repo_id + left outer join + (select * from api_get_all_repo_prs) c + on repo.repo_id=c.repo_id + JOIN repo_groups ON repo_groups.repo_group_id = repo.repo_group_id + WHERE + repo_groups.repo_group_id = :repo_group_id + ORDER BY repo.repo_git + """) + + results = pd.read_sql(repos_in_repo_groups_SQL, engine, params={'repo_group_id': repo_group_id}) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/owner//repo/'.format(AUGUR_API_VERSION)) +def get_repo_by_git_name(owner, repo): + + get_repo_by_git_name_sql = s.sql.text(""" + SELECT repo.repo_id, repo.repo_group_id, rg_name + FROM repo JOIN repo_groups ON repo_groups.repo_group_id = repo.repo_group_id + WHERE repo_name = :repo AND repo_path LIKE :owner + GROUP BY repo_id, rg_name + """) + + results = pd.read_sql(get_repo_by_git_name_sql, engine, params={'owner': '%{}_'.format(owner), 'repo': repo,}) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/rg-name//repo-name/'.format(AUGUR_API_VERSION)) +def get_repo_by_name(rg_name, repo_name): + + get_repo_by_name_sql = s.sql.text(""" + SELECT repo_id, repo.repo_group_id, repo_git as url + FROM repo, repo_groups + WHERE repo.repo_group_id = repo_groups.repo_group_id + AND 
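The pattern repeated through this rewritten util.py is the same in every endpoint: the module-level `app` and `engine` imported from `..server` replace the old `create_routes(server)` closure, each handler builds a SQLAlchemy `text()` query, reads it into a DataFrame with `pandas.read_sql`, and returns the JSON as a Flask Response. A minimal, self-contained sketch of that pattern, with a placeholder connection URL rather than Augur's real configuration:

import pandas as pd
import sqlalchemy as s
from flask import Flask, Response

app = Flask(__name__)
# placeholder URL; Augur builds its engine from the configured database credentials
engine = s.create_engine("postgresql+psycopg2://user:pass@localhost:5432/augur")

@app.route('/api/unstable/repo-groups')
def repo_groups():
    # text() keeps the SQL parameterizable; read_sql returns a DataFrame
    query = s.sql.text("SELECT * FROM repo_groups ORDER BY rg_name")
    results = pd.read_sql(query, engine)
    data = results.to_json(orient="records", date_format="iso", date_unit="ms")
    return Response(response=data, status=200, mimetype="application/json")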
LOWER(rg_name) = LOWER(:rg_name) + AND LOWER(repo_name) = LOWER(:repo_name) + """) + results = pd.read_sql(get_repo_by_name_sql, engine, params={'rg_name': rg_name, 'repo_name': repo_name}) + results['url'] = results['url'].apply(lambda datum: datum.split('//')[1]) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/rg-name/'.format(AUGUR_API_VERSION)) +def get_group_by_name(rg_name): + groupSQL = s.sql.text(""" + SELECT repo_group_id, rg_name + FROM repo_groups + WHERE lower(rg_name) = lower(:rg_name) + """) + results = pd.read_sql(groupSQL, engine, params={'rg_name': rg_name}) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype="application/json") + +@app.route('/{}/dosocs/repos'.format(AUGUR_API_VERSION)) +def get_repos_for_dosocs(): + get_repos_for_dosocs_SQL = s.sql.text(""" + SELECT b.repo_id, CONCAT(a.value || b.repo_group_id || chr(47) || b.repo_path || b.repo_name) AS path + FROM settings a, repo b + WHERE a.setting='repo_directory' + """) + + results = pd.read_sql(get_repos_for_dosocs_SQL, engine) + data = results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype='application/json') + +@app.route('/{}/repo-groups//get-issues'.format(AUGUR_API_VERSION)) +@app.route('/{}/repos//get-issues'.format(AUGUR_API_VERSION)) +def get_issues(repo_group_id, repo_id=None): + if not repo_id: + get_issues_sql = s.sql.text(""" + SELECT issue_title, + issues.issue_id, + issues.repo_id, + issues.html_url, + issue_state AS STATUS, + issues.created_at AS DATE, + count(issue_events.event_id), + MAX(issue_events.created_at) AS LAST_EVENT_DATE, + EXTRACT(DAY FROM NOW() - issues.created_at) AS OPEN_DAY + FROM issues, + issue_events + WHERE issues.repo_id IN (SELECT repo_id FROM repo WHERE repo_group_id = :repo_group_id) + AND issues.issue_id = issue_events.issue_id + AND issues.pull_request is NULL + GROUP BY issues.issue_id + ORDER by OPEN_DAY DESC """) - - results = pd.read_sql(repos_in_repo_groups_SQL, server.engine, params={'repo_group_id': repo_group_id}) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/owner//repo/'.format(AUGUR_API_VERSION)) - def get_repo_by_git_name(owner, repo): - - get_repo_by_git_name_sql = s.sql.text(""" - SELECT repo.repo_id, repo.repo_group_id, rg_name - FROM repo JOIN repo_groups ON repo_groups.repo_group_id = repo.repo_group_id - WHERE repo_name = :repo AND repo_path LIKE :owner - GROUP BY repo_id, rg_name + results = pd.read_sql(get_issues_sql, engine, params={'repo_group_id': repo_group_id}) + else: + get_issues_sql = s.sql.text(""" + SELECT issue_title, + issues.issue_id, + issues.repo_id, + issues.html_url, + issue_state AS STATUS, + issues.created_at AS DATE, + count(issue_events.event_id), + MAX(issue_events.created_at) AS LAST_EVENT_DATE, + EXTRACT(DAY FROM NOW() - issues.created_at) AS OPEN_DAY, + repo_name + FROM issues JOIN repo ON issues.repo_id = repo.repo_id, issue_events + WHERE issues.repo_id = :repo_id + AND issues.pull_request IS NULL + AND issues.issue_id = issue_events.issue_id + GROUP BY issues.issue_id, repo_name + ORDER by OPEN_DAY DESC """) + results = pd.read_sql(get_issues_sql, engine, params={'repo_id': repo_id}) + data = 
results.to_json(orient="records", date_format='iso', date_unit='ms') + return Response(response=data, + status=200, + mimetype='application/json') - results = pd.read_sql(get_repo_by_git_name_sql, server.engine, params={'owner': '%{}_'.format(owner), 'repo': repo,}) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype="application/json") - - @server.app.route('/{}/rg-name//repo-name/'.format(AUGUR_API_VERSION)) - def get_repo_by_name(rg_name, repo_name): +@app.route('/{}/api-port'.format(AUGUR_API_VERSION)) +def api_port(): - get_repo_by_name_sql = s.sql.text(""" - SELECT repo_id, repo.repo_group_id, repo_git as url - FROM repo, repo_groups - WHERE repo.repo_group_id = repo_groups.repo_group_id - AND LOWER(rg_name) = LOWER(:rg_name) - AND LOWER(repo_name) = LOWER(:repo_name) - """) - results = pd.read_sql(get_repo_by_name_sql, server.engine, params={'rg_name': rg_name, 'repo_name': repo_name}) - results['url'] = results['url'].apply(lambda datum: datum.split('//')[1]) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype="application/json") + with DatabaseSession(logger) as session: - @server.app.route('/{}/rg-name/'.format(AUGUR_API_VERSION)) - def get_group_by_name(rg_name): - groupSQL = s.sql.text(""" - SELECT repo_group_id, rg_name - FROM repo_groups - WHERE lower(rg_name) = lower(:rg_name) - """) - results = pd.read_sql(groupSQL, server.engine, params={'rg_name': rg_name}) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, + response = {'port': AugurConfig(logger, session).get_value('Server', 'port')} + return Response(response=json.dumps(response), status=200, mimetype="application/json") - - @server.app.route('/{}/dosocs/repos'.format(AUGUR_API_VERSION)) - def get_repos_for_dosocs(): - get_repos_for_dosocs_SQL = s.sql.text(""" - SELECT b.repo_id, CONCAT(a.value || b.repo_group_id || chr(47) || b.repo_path || b.repo_name) AS path - FROM settings a, repo b - WHERE a.setting='repo_directory' - """) - - results = pd.read_sql(get_repos_for_dosocs_SQL, server.engine) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype='application/json') - - @server.app.route('/{}/repo-groups//get-issues'.format(AUGUR_API_VERSION)) - @server.app.route('/{}/repos//get-issues'.format(AUGUR_API_VERSION)) - def get_issues(repo_group_id, repo_id=None): - if not repo_id: - get_issues_sql = s.sql.text(""" - SELECT issue_title, - issues.issue_id, - issues.repo_id, - issues.html_url, - issue_state AS STATUS, - issues.created_at AS DATE, - count(issue_events.event_id), - MAX(issue_events.created_at) AS LAST_EVENT_DATE, - EXTRACT(DAY FROM NOW() - issues.created_at) AS OPEN_DAY - FROM issues, - issue_events - WHERE issues.repo_id IN (SELECT repo_id FROM repo WHERE repo_group_id = :repo_group_id) - AND issues.issue_id = issue_events.issue_id - AND issues.pull_request is NULL - GROUP BY issues.issue_id - ORDER by OPEN_DAY DESC - """) - results = pd.read_sql(get_issues_sql, server.engine, params={'repo_group_id': repo_group_id}) - else: - get_issues_sql = s.sql.text(""" - SELECT issue_title, - issues.issue_id, - issues.repo_id, - issues.html_url, - issue_state AS STATUS, - issues.created_at AS DATE, - count(issue_events.event_id), - MAX(issue_events.created_at) AS LAST_EVENT_DATE, - EXTRACT(DAY FROM NOW() - 
issues.created_at) AS OPEN_DAY, - repo_name - FROM issues JOIN repo ON issues.repo_id = repo.repo_id, issue_events - WHERE issues.repo_id = :repo_id - AND issues.pull_request IS NULL - AND issues.issue_id = issue_events.issue_id - GROUP BY issues.issue_id, repo_name - ORDER by OPEN_DAY DESC - """) - results = pd.read_sql(get_issues_sql, server.engine, params={'repo_id': repo_id}) - data = results.to_json(orient="records", date_format='iso', date_unit='ms') - return Response(response=data, - status=200, - mimetype='application/json') - - @server.app.route('/{}/api-port'.format(AUGUR_API_VERSION)) - def api_port(): - - with DatabaseSession(logger) as session: - - response = {'port': AugurConfig(logger, session).get_value('Server', 'port')} - return Response(response=json.dumps(response), - status=200, - mimetype="application/json") diff --git a/augur/api/server.py b/augur/api/server.py index 2f13eeab1a..a725193165 100644 --- a/augur/api/server.py +++ b/augur/api/server.py @@ -27,423 +27,358 @@ from augur.application.config import AugurConfig from metadata import __version__ as augur_code_version -from augur.api.routes import AUGUR_API_VERSION +# from augur.api.routes import AUGUR_API_VERSION +AUGUR_API_VERSION = "api/unstable" +show_metadata = False -class Server(): - """Initializes the server, creating the Flask application. +def get_file_id(path: str) -> str: + """Gets the file id of a given path. + + Args: + path: file path - Attributes: - logger (logging.Logger): handles logging - session (DatabaseSession): used to create the config - config (AugurConfig): used to access the config in the database - engine: Sqlalchemy database connection engine - cache: ? - server_cache: ? - app: Flask application - show_metadata (bool): ? + Examples: + If the path /augur/best_routes.py is given it will return "best_routes" + + Returns: + the filename as a string """ + return os.path.splitext(os.path.basename(path))[0] - def __init__(self): - """Initialize the Server class.""" - - self.logger = AugurLogger("server").get_logger() - self.session = DatabaseSession(self.logger) - self.config = AugurConfig(self.logger, self.session) - self.engine = self.session.engine - - self.cache_manager = self.create_cache_manager() - self.server_cache = self.get_server_cache() - self.app = None - self.show_metadata = False - - - def create_app(self): - """Define the flask app and configure the routes.""" - template_dir = str(Path(__file__).parent.parent / "templates") - static_dir = str(Path(__file__).parent.parent / "static") - - self.app = Flask(__name__, template_folder=template_dir, static_folder=static_dir) - self.logger.debug("Created Flask app") - - # defines the api version on the flask app, - # so when we pass the flask app to the routes files we - # know can access the api version via the app variable - self.app.augur_api_version = AUGUR_API_VERSION - - - CORS(self.app) - self.app.url_map.strict_slashes = False - - self.app.config['WTF_CSRF_ENABLED'] = False - - - self.logger.debug("Creating API routes...") - self.create_all_routes() - self.create_metrics() - - @self.app.route('/') - @self.app.route('/ping') - @self.app.route('/status') - @self.app.route('/healthcheck') - def index(): - """ - Redirects to health check route - """ - return redirect(self.app.augur_api_version) - - @self.app.route(f'/{self.app.augur_api_version}/') - @self.app.route(f'/{self.app.augur_api_version}/status') - def status(): - """ - Health check route - """ - status = { - 'status': 'OK', - 'version': augur_code_version - } - return 
Response(response=json.dumps(status), - status=200, - mimetype="application/json") - - def get_app(self) -> Optional[Flask]: - """Get flask app. +def create_metrics() -> None: + """Starts process of adding all the functions from the metrics folder to the flask app as routes.""" + # get a list of the metrics files + metric_files = get_metric_files() - Returns: - The flask applcation - """ - return self.app + # import the metric modules and add them to the flask app using add_metrics + for file in metric_files: + importlib.import_module(f"augur.api.metrics.{file}") + add_metrics(f"augur.api.metrics.{file}") - def create_all_routes(self): - """Add all the routes defined in the files in the augur/api/routes directory to the flask app.""" +def add_metrics(module_name: str) -> None: + """Determine type of metric and call function to add them to the flask app. + + This function takes modules that contains metrics, + and adds them to the flask app via the add_standard_metric + or add_toss_metric methods. + + Note: + The attribute is_metric and obj.metadata['type'] + are set in file augur/api/routes/util.py in the function + register_metric(). This function is a decorator and is + how a function is defined as a metric. + + Args: + module_name: path to the module + """ - # gets a list of the routes files - route_files = self.get_route_files() + # gets all the members in the module and loops through them + for _, obj in inspect.getmembers(sys.modules[module_name]): - for route_file in route_files: + # cheks if the object is a function + if inspect.isfunction(obj) is True: - # imports the routes file - module = importlib.import_module('.' + route_file, 'augur.api.routes') + # checks if the function has the attribute is_metric. + # If it does then it is a metric function and needs to be added to the flask app + if hasattr(obj, 'is_metric') is True: - # each file that contains routes must contain a create_routes function - # and this line is calling that function and passing the flask app, - # so that the routes in the files can be added to the flask app - module.create_routes(self) - - for route_file in ["augur_view", "routes", "api"]: - module = importlib.import_module('.' + route_file, 'augur.api.view') + # determines the type of metric and calls the correct method to add it to the flask app + if obj.metadata['type'] == "standard": + add_standard_metric(obj, obj.metadata['endpoint']) + if obj.metadata['type'] == "toss": + add_toss_metric(obj, obj.metadata['endpoint']) - module.create_routes(self) - - def get_route_files(self) -> List[str]: - """This function gets a list of all the routes files in the augur/api/routes directory. - - Returns: - list of file names as strings - """ - route_files = [] - for filename in glob.iglob("augur/api/routes/*"): - file_id = self.get_file_id(filename) - # this filters out files like __init__ and __pycache__. And makes sure it only get py files - if not file_id.startswith('__') and filename.endswith('.py'): - route_files.append(file_id) +def get_metric_files() -> List[str]: + """Get list of all the metrics files in the augur/api/metrics directory, - return route_files + Returns: + list of file names + """ + metric_files = [] + for filename in glob.iglob("augur/api/metrics/**"): + file_id = get_file_id(filename) + + # this filters out files like __init__ and __pycache__. 
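As the docstring above explains, metric functions advertise themselves through an `is_metric` attribute and a `metadata` dict attached by the `register_metric()` decorator. A simplified, hypothetical sketch of that contract and of the `inspect.getmembers` discovery loop used by `add_metrics` (the decorator below is a stand-in, not Augur's real implementation):

import inspect
import sys

def register_metric(endpoint=None, metric_type="standard"):
    # hypothetical, stripped-down stand-in for Augur's real decorator
    def decorate(func):
        func.is_metric = True
        func.metadata = {
            "endpoint": endpoint or func.__name__.replace("_", "-"),
            "type": metric_type,
        }
        return func
    return decorate

@register_metric()
def contributors(repo_group_id, repo_id=None):
    return {"example": "data"}

# discovery in the style of add_metrics(): pick up anything flagged is_metric
for _, obj in inspect.getmembers(sys.modules[__name__]):
    if inspect.isfunction(obj) and getattr(obj, "is_metric", False):
        print(obj.metadata["type"], obj.metadata["endpoint"])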
And makes sure it only get py files + if not file_id.startswith('__') and filename.endswith('.py'): + metric_files.append(file_id) + return metric_files - def get_file_id(self, path: str) -> str: - """Gets the file id of a given path. +# NOTE: Paramater on=None removed, since it is not used in the function Aug 18, 2022 - Andrew Brain +def route_transform(func: Any, args: Any=None, kwargs: dict=None, repo_url_base: str=None, orient: str ='records', + group_by: str=None, aggregate: str='sum', resample=None, date_col: str='date') -> str: + """Call a metric function and apply data transformations. + + Note: + This function takes a function and it arguments, calls the function, then converts it to json if possible. + It also does some manipulation of the data if paramaters like group_by, aggregate, and respample are set + + Args: + func: function that is called + args: + kwargs: + repo_url_base: + orient: + group_byf + on + aggregate: + resample: + date_col: + + Returns: + The result of calling the function and applying the data transformations + """ + # this defines the way a pandas dataframe is converted to json + if orient is None: + orient = 'records' + + result = '' + + if not show_metadata: + + if args is None: + args = () - Args: - path: file path + if kwargs is None: + kwargs = {} + + if repo_url_base: + kwargs['repo_url'] = str(base64.b64decode(repo_url_base).decode()) + + # calls the function that was passed to get the data + data = func(*args, **kwargs) - Examples: - If the path /augur/best_routes.py is given it will return "best_routes" + # most metrics return a pandas dataframe, which has the attribute to_json + # so basically this is checking if it is a pandas dataframe + if hasattr(data, 'to_json'): + + # if group_by is defined it groups by the group_by value + # and uses the aggregate to determine the operation performed + if group_by is not None: + data = data.group_by(group_by).aggregate(aggregate) + + # This code block is resampling the pandas dataframe, here is the documentation for it + # https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.resample.html + if resample is not None: + data['idx'] = pd.to_datetime(data[date_col]) + data = data.set_index('idx') + data = data.resample(resample).aggregate(aggregate) + data['date'] = data.index + + # converts pandas dataframe to json + result = data.to_json(orient=orient, date_format='iso', date_unit='ms') + else: + # trys to convert dict to json + try: + + result = json.dumps(data) + except: + result = data + else: + result = json.dumps(func.metadata) - Returns: - the filename as a string - """ - return os.path.splitext(os.path.basename(path))[0] + # returns the result of the function + return result +def flaskify(function: Any) -> Any: + """Simplifies API endpoints that just accept owner and repo, transforms them and spits them out. 
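The resample branch of `route_transform` above is easiest to see on toy data. The sketch below mirrors it on a made-up DataFrame, except that the raw date column is dropped before resampling so the aggregate only touches numeric columns (pandas spells the grouping call `DataFrame.groupby`, which is worth checking against the `data.group_by(...)` branch above):

import pandas as pd

data = pd.DataFrame({
    "date": pd.date_range("2022-01-01", periods=6, freq="D"),
    "commit_count": [3, 1, 4, 1, 5, 9],
})

resample = "W"      # stands in for the ?resample= query parameter
aggregate = "sum"   # stands in for the ?aggregate= query parameter

data = data.set_index(pd.to_datetime(data["date"])).drop(columns=["date"])
data = data.resample(resample).aggregate(aggregate)
data["date"] = data.index

print(data.to_json(orient="records", date_format="iso", date_unit="ms"))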
+ """ + if cache_manager: + def cache_generated_function(*args, **kwargs): + def heavy_lifting(): + return route_transform(function, args, kwargs, **request.args.to_dict()) + body = server_cache.get(key=str(request.url), createfunc=heavy_lifting) + return Response(response=body, + status=200, + mimetype="application/json") + cache_generated_function.__name__ = function.__name__ + logger.info(cache_generated_function.__name__) + return cache_generated_function + + def generated_function(*args, **kwargs): + kwargs.update(request.args.to_dict()) + return Response(response=route_transform(function, args, kwargs, **request.args.to_dict()), + status=200, + mimetype="application/json") + generated_function.__name__ = function.__name__ + return generated_function + +def routify(func: Any, endpoint_type: str) -> Any: + """Wraps a metric function allowing it to be mapped to a route, + get request args and also transforms the metric functions's + output to json + + :param func: The function to be wrapped + :param endpoint_type: The type of API endpoint, i.e. 'repo_group' or 'repo' + """ - - def create_metrics(self) -> None: - """Starts process of adding all the functions from the metrics folder to the flask app as routes.""" - # get a list of the metrics files - metric_files = self.get_metric_files() + # this is that is generated by routify() as passed to the app.route() decorator + # basically this is the function that is called when an endpoint is pinged + def endpoint_function(*args, **kwargs) -> Response: - # import the metric modules and add them to the flask app using self.add_metrics - for file in metric_files: - importlib.import_module(f"augur.api.metrics.{file}") - self.add_metrics(f"augur.api.metrics.{file}") + # sets the kwargs as the query paramaters or the arguments sent in the headers + kwargs.update(request.args.to_dict()) + # if repo_group_id is not specified, it sets it to 1 which is the default repo group + if 'repo_group_id' not in kwargs and func.metadata["type"] != "toss": + kwargs['repo_group_id'] = 1 - def add_metrics(self, module_name: str) -> None: - """Determine type of metric and call function to add them to the flask app. - - This function takes modules that contains metrics, - and adds them to the flask app via the self.add_standard_metric - or self.add_toss_metric methods. - Note: - The attribute is_metric and obj.metadata['type'] - are set in file augur/api/routes/util.py in the function - register_metric(). This function is a decorator and is - how a function is defined as a metric. + # this function call takes the arguments specified when the endpoint is pinged + # and calls the actual function in the metrics folder and then returns the result + # NOTE: This also converts the data into json if the function returns a pandas dataframe or dict + data = route_transform(func, args, kwargs) + - Args: - module_name: path to the module - """ + # this is where the Response is created for all the metrics + return Response(response=data, + status=200, + mimetype="application/json") + + # this sets the name of the endpoint function + # so that the repo_endpoint, repo_group_endpoint, and deprecated_repo_endpoint + # don't create endpoint funcitons with the same name + endpoint_function.__name__ = f"{endpoint_type}_" + func.__name__ + return endpoint_function + + +def add_standard_metric(function: Any, endpoint: str) -> None: + """Add standard metric routes to the flask app. 
+ + Args: + function: the function that needs to be mapped to the routes + endpoint: the path that the endpoint should be defined as + """ + repo_endpoint = f'/{app.augur_api_version}/repos//{endpoint}' + repo_group_endpoint = f'/{app.augur_api_version}/repo-groups//{endpoint}' + deprecated_repo_endpoint = f'/{app.augur_api_version}/repo-groups//repos//{endpoint}' - # gets all the members in the module and loops through them - for _, obj in inspect.getmembers(sys.modules[module_name]): + + # These three lines are defining routes on the flask app, and passing a function. + # Essetially the strucutre of this is app.route(endpoint)(function). + # So when this code is executed, it calls routify() which returns a function. + # The function that is returned is the function that is registerred with the route, and called when the route is pinged + + # Simply routify() is called by the route being pinged, and + # then routify() returns a function so it is called, + # and then that function returns a Response + app.route(repo_endpoint)(routify(function, 'repo')) + app.route(repo_group_endpoint)(routify(function, 'repo_group')) + app.route(deprecated_repo_endpoint )(routify(function, 'deprecated_repo')) + +def add_toss_metric(function: Any, endpoint: str) -> None: + """Add toss metric routes to the flask app. + + Args: + function: the function that needs to be mapped to the routes + endpoint: the path that the endpoint should be defined as + """ + repo_endpoint = f'/{app.augur_api_version}/repos//{endpoint}' + app.route(repo_endpoint)(routify(function, 'repo')) - # cheks if the object is a function - if inspect.isfunction(obj) is True: +def create_cache_manager() -> CacheManager: + """Create cache for endpoints? + + Returns: + manager of the cache + """ - # checks if the function has the attribute is_metric. - # If it does then it is a metric function and needs to be added to the flask app - if hasattr(obj, 'is_metric') is True: + cache_config = { + 'cache.type': 'file', + 'cache.data_dir': 'runtime/cache/', + 'cache.lock_dir': 'runtime/cache/' +} - # determines the type of metric and calls the correct method to add it to the flask app - if obj.metadata['type'] == "standard": - self.add_standard_metric(obj, obj.metadata['endpoint']) - if obj.metadata['type'] == "toss": - self.add_toss_metric(obj, obj.metadata['endpoint']) + if not os.path.exists(cache_config['cache.data_dir']): + os.makedirs(cache_config['cache.data_dir']) + if not os.path.exists(cache_config['cache.lock_dir']): + os.makedirs(cache_config['cache.lock_dir']) + cache_parsed = parse_cache_config_options(cache_config) + cache = CacheManager(**cache_parsed) + return cache +def get_server_cache(config, cache_manager) -> Cache: + """Create the server cache, set expiration, and clear + + Returns: + server cache + """ - def get_metric_files(self) -> List[str]: - """Get list of all the metrics files in the augur/api/metrics directory, + expire = int(config.get_value('Server', 'cache_expire')) + server_cache = cache_manager.get_cache('server', expire=expire) + server_cache.clear() - Returns: - list of file names - """ - metric_files = [] + return server_cache - for filename in glob.iglob("augur/api/metrics/**"): - file_id = self.get_file_id(filename) - - # this filters out files like __init__ and __pycache__. 
And makes sure it only get py files - if not file_id.startswith('__') and filename.endswith('.py'): - metric_files.append(file_id) - return metric_files - - # NOTE: Paramater on=None removed, since it is not used in the function Aug 18, 2022 - Andrew Brain - def transform(self, func: Any, args: Any=None, kwargs: dict=None, repo_url_base: str=None, orient: str ='records', - group_by: str=None, aggregate: str='sum', resample=None, date_col: str='date') -> str: - """Call a metric function and apply data transformations. - - Note: - This function takes a function and it arguments, calls the function, then converts it to json if possible. - It also does some manipulation of the data if paramaters like group_by, aggregate, and respample are set - - Args: - func: function that is called - args: - kwargs: - repo_url_base: - orient: - group_byf - on - aggregate: - resample: - date_col: - - Returns: - The result of calling the function and applying the data transformations - """ - # this defines the way a pandas dataframe is converted to json - if orient is None: - orient = 'records' - - result = '' - - if not self.show_metadata: - - if args is None: - args = () - - if kwargs is None: - kwargs = {} - - if repo_url_base: - kwargs['repo_url'] = str(base64.b64decode(repo_url_base).decode()) - - # calls the function that was passed to get the data - data = func(*args, **kwargs) - - # most metrics return a pandas dataframe, which has the attribute to_json - # so basically this is checking if it is a pandas dataframe - if hasattr(data, 'to_json'): - - # if group_by is defined it groups by the group_by value - # and uses the aggregate to determine the operation performed - if group_by is not None: - data = data.group_by(group_by).aggregate(aggregate) - - # This code block is resampling the pandas dataframe, here is the documentation for it - # https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.resample.html - if resample is not None: - data['idx'] = pd.to_datetime(data[date_col]) - data = data.set_index('idx') - data = data.resample(resample).aggregate(aggregate) - data['date'] = data.index - - # converts pandas dataframe to json - result = data.to_json(orient=orient, date_format='iso', date_unit='ms') - else: - # trys to convert dict to json - try: - - result = json.dumps(data) - except: - result = data - else: - result = json.dumps(func.metadata) - - # returns the result of the function - return result - - def flaskify(self, function: Any) -> Any: - """Simplifies API endpoints that just accept owner and repo, transforms them and spits them out. 
- """ - if self.cache_manager: - def cache_generated_function(*args, **kwargs): - def heavy_lifting(): - return self.transform(function, args, kwargs, **request.args.to_dict()) - body = self.server_cache.get(key=str(request.url), createfunc=heavy_lifting) - return Response(response=body, - status=200, - mimetype="application/json") - cache_generated_function.__name__ = function.__name__ - self.logger.info(cache_generated_function.__name__) - return cache_generated_function - - def generated_function(*args, **kwargs): - kwargs.update(request.args.to_dict()) - return Response(response=self.transform(function, args, kwargs, **request.args.to_dict()), - status=200, - mimetype="application/json") - generated_function.__name__ = function.__name__ - return generated_function - def routify(self, func: Any, endpoint_type: str) -> Any: - """Wraps a metric function allowing it to be mapped to a route, - get request args and also transforms the metric functions's - output to json - :param func: The function to be wrapped - :param endpoint_type: The type of API endpoint, i.e. 'repo_group' or 'repo' - """ +logger = AugurLogger("server").get_logger() +db_session = DatabaseSession(logger) +augur_config = AugurConfig(logger, db_session) +engine = db_session.engine - # this is that is generated by self.routify() as passed to the self.app.route() decorator - # basically this is the function that is called when an endpoint is pinged - def endpoint_function(*args, **kwargs) -> Response: +template_dir = str(Path(__file__).parent.parent / "templates") +static_dir = str(Path(__file__).parent.parent / "static") - # sets the kwargs as the query paramaters or the arguments sent in the headers - kwargs.update(request.args.to_dict()) +app = Flask(__name__, template_folder=template_dir, static_folder=static_dir) +logger.debug("Created Flask app") - # if repo_group_id is not specified, it sets it to 1 which is the default repo group - if 'repo_group_id' not in kwargs and func.metadata["type"] != "toss": - kwargs['repo_group_id'] = 1 +# defines the api version on the flask app, +# so when we pass the flask app to the routes files we +# know can access the api version via the app variable +app.augur_api_version = AUGUR_API_VERSION - # this function call takes the arguments specified when the endpoint is pinged - # and calls the actual function in the metrics folder and then returns the result - # NOTE: This also converts the data into json if the function returns a pandas dataframe or dict - data = self.transform(func, args, kwargs) +CORS(app) +app.url_map.strict_slashes = False +app.config['WTF_CSRF_ENABLED'] = False - # this is where the Response is created for all the metrics - return Response(response=data, - status=200, - mimetype="application/json") - # this sets the name of the endpoint function - # so that the repo_endpoint, repo_group_endpoint, and deprecated_repo_endpoint - # don't create endpoint funcitons with the same name - endpoint_function.__name__ = f"{endpoint_type}_" + func.__name__ - return endpoint_function +logger.debug("Creating API routes...") +create_metrics() - - def add_standard_metric(self, function: Any, endpoint: str) -> None: - """Add standard metric routes to the flask app. 
- - Args: - function: the function that needs to be mapped to the routes - endpoint: the path that the endpoint should be defined as - """ - repo_endpoint = f'/{self.app.augur_api_version}/repos//{endpoint}' - repo_group_endpoint = f'/{self.app.augur_api_version}/repo-groups//{endpoint}' - deprecated_repo_endpoint = f'/{self.app.augur_api_version}/repo-groups//repos//{endpoint}' +@app.route('/ping') +@app.route('/status') +@app.route('/healthcheck') +def index(): + """ + Redirects to health check route + """ + return redirect(app.augur_api_version) - - # These three lines are defining routes on the flask app, and passing a function. - # Essetially the strucutre of this is self.app.route(endpoint)(function). - # So when this code is executed, it calls self.routify() which returns a function. - # The function that is returned is the function that is registerred with the route, and called when the route is pinged - - # Simply self.routify() is called by the route being pinged, and - # then self.routify() returns a function so it is called, - # and then that function returns a Response - self.app.route(repo_endpoint)(self.routify(function, 'repo')) - self.app.route(repo_group_endpoint)(self.routify(function, 'repo_group')) - self.app.route(deprecated_repo_endpoint )(self.routify(function, 'deprecated_repo')) - - def add_toss_metric(self, function: Any, endpoint: str) -> None: - """Add toss metric routes to the flask app. - - Args: - function: the function that needs to be mapped to the routes - endpoint: the path that the endpoint should be defined as - """ - repo_endpoint = f'/{self.app.augur_api_version}/repos//{endpoint}' - self.app.route(repo_endpoint)(self.routify(function, 'repo')) - - def create_cache_manager(self) -> CacheManager: - """Create cache for endpoints? 
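For reference, the Beaker wiring that `create_cache_manager()` and `get_server_cache()` set up, and that `flaskify()` consumes through `createfunc`, looks roughly like this in isolation; the cache directory, expiry, and cache key below are placeholder values:

import os
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_config = {
    'cache.type': 'file',
    'cache.data_dir': 'runtime/cache/',
    'cache.lock_dir': 'runtime/cache/',
}
for path in (cache_config['cache.data_dir'], cache_config['cache.lock_dir']):
    os.makedirs(path, exist_ok=True)

cache_manager = CacheManager(**parse_cache_config_options(cache_config))
server_cache = cache_manager.get_cache('server', expire=3600)  # expiry is a placeholder

def heavy_lifting():
    # stands in for the route_transform(...) call inside flaskify()
    return '{"status": "OK"}'

# computed once, then served from the file cache until the entry expires
body = server_cache.get(key="/api/unstable/status", createfunc=heavy_lifting)
print(body)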
- - Returns: - manager of the cache - """ - - cache_config = { - 'cache.type': 'file', - 'cache.data_dir': 'runtime/cache/', - 'cache.lock_dir': 'runtime/cache/' +@app.route(f'/{app.augur_api_version}/') +@app.route(f'/{app.augur_api_version}/status') +def status(): + """ + Health check route + """ + status = { + 'status': 'OK', + 'version': augur_code_version } + return Response(response=json.dumps(status), + status=200, + mimetype="application/json") - if not os.path.exists(cache_config['cache.data_dir']): - os.makedirs(cache_config['cache.data_dir']) - if not os.path.exists(cache_config['cache.lock_dir']): - os.makedirs(cache_config['cache.lock_dir']) - cache_parsed = parse_cache_config_options(cache_config) - cache = CacheManager(**cache_parsed) +from .routes import * - return cache +# import frontend routes +from .view.augur_view import * +from .view.routes import * +from .view.api import * - def get_server_cache(self) -> Cache: - """Create the server cache, set expiration, and clear - - Returns: - server cache - """ +cache_manager = create_cache_manager() +server_cache = get_server_cache(augur_config, cache_manager) - expire = int(self.config.get_value('Server', 'cache_expire')) - server_cache = self.cache_manager.get_cache('server', expire=expire) - server_cache.clear() - return server_cache -# this is where the flask app is defined and the server is insantiated -server = Server() -server.create_app() -app = server.get_app() diff --git a/augur/api/view/api.py b/augur/api/view/api.py index f31fbd1057..2ca0d3eb92 100644 --- a/augur/api/view/api.py +++ b/augur/api/view/api.py @@ -1,141 +1,139 @@ from flask import Flask, render_template, render_template_string, request, abort, jsonify, redirect, url_for, session, flash from flask_login import current_user, login_required from augur.application.db.models import Repo -from augur.application.db.session import DatabaseSession # from augur.util.repo_load_controller import parse_org_url, parse_repo_url from .utils import * - -def create_routes(server): - @server.app.route('/cache/file/') - @server.app.route('/cache/file/') - def cache(file=None): - if file is None: - return redirect(url_for('root', path=getSetting('caching'))) - return redirect(url_for('root', path=toCacheFilepath(file))) - - @server.app.route('/account/repos/add', methods = ['POST']) - @login_required - def av_add_user_repo(): - url = request.form.get("url") - group = request.form.get("group_name") - - if group == "None": - group = current_user.login_name + "_default" - - if not url or not group: - flash("Repo or org URL must not be empty") - elif Repo.parse_github_org_url(url): - current_user.add_org(group, url) - flash("Successfully added org") - elif Repo.parse_github_repo_url(url): - current_user.add_repo(group, url) - flash("Successfully added repo") - else: - flash("Invalid repo or org url") - - return redirect(url_for("user_settings") + "?section=tracker") - - @server.app.route('/account/update', methods = ['POST']) - @login_required - def user_update_password(): - old_password = request.form.get("password") - new_password = request.form.get("new_password") - - if current_user.update_password(old_password, new_password): - flash(f"Account {current_user.login_name} successfully updated") - else: - flash("An error occurred updating the account") - - return redirect(url_for("user_settings")) +from ..server import app, db_session + +@app.route('/cache/file/') +@app.route('/cache/file/') +def cache(file=None): + if file is None: + return redirect(url_for('root', 
path=getSetting('caching'))) + return redirect(url_for('root', path=toCacheFilepath(file))) + +@app.route('/account/repos/add', methods = ['POST']) +@login_required +def av_add_user_repo(): + url = request.form.get("url") + group = request.form.get("group_name") + + if group == "None": + group = current_user.login_name + "_default" + + if not url or not group: + flash("Repo or org URL must not be empty") + elif Repo.parse_github_org_url(url): + current_user.add_org(group, url) + flash("Successfully added org") + elif Repo.parse_github_repo_url(url): + current_user.add_repo(group, url) + flash("Successfully added repo") + else: + flash("Invalid repo or org url") + + return redirect(url_for("user_settings") + "?section=tracker") + +@app.route('/account/update', methods = ['POST']) +@login_required +def user_update_password(): + old_password = request.form.get("password") + new_password = request.form.get("new_password") + + if current_user.update_password(old_password, new_password): + flash(f"Account {current_user.login_name} successfully updated") + else: + flash("An error occurred updating the account") - @server.app.route('/account/group/add', methods = ['POST']) - @login_required - def user_add_group(): - group = request.form.get("group_name") - - if not group: - flash("No group name provided") - elif current_user.add_group(group): - flash(f"Successfully added group {group}") - else: - flash("An error occurred adding group") - - return redirect(url_for("user_settings") + "?section=tracker") - - @server.app.route('/account/group/remove') - @login_required - def user_remove_group(): - group = request.args.get("group_name") - - if not group: - flash("No group name provided") - elif current_user.remove_group(group): - flash(f"Successfully removed group {group}") - else: - flash("An error occurred removing group") - - return redirect(url_for("user_settings") + "?section=tracker") - - @server.app.route('/account/repo/remove') - @login_required - def user_remove_repo(): - group = request.args.get("group_name") - repo = request.args.get("repo_id") - - if not repo: - flash("No repo id provided") - if not group: - flash("No group name provided") - - repo = int(repo) - - with DatabaseSession(logger) as session: - result = current_user.remove_repo(session, group, repo)[0] - - if result: - flash(f"Successfully removed repo {repo} from group {group}") - else: - flash("An error occurred removing repo from group") - - return redirect(url_for("user_group_view") + f"?group={group}") + return redirect(url_for("user_settings")) + +@app.route('/account/group/add', methods = ['POST']) +@login_required +def user_add_group(): + group = request.form.get("group_name") + + if not group: + flash("No group name provided") + elif current_user.add_group(group): + flash(f"Successfully added group {group}") + else: + flash("An error occurred adding group") - @server.app.route('/account/application/deauthorize') - @login_required - def user_app_deauthorize(): - token = request.args.get("token") - - if not token: - flash("No application provided") - elif current_user.invalidate_session(token): - flash("Successfully deauthorized application") - else: - flash("Invalid application token") - - return redirect(url_for("user_settings") + "?section=application") + return redirect(url_for("user_settings") + "?section=tracker") + +@app.route('/account/group/remove') +@login_required +def user_remove_group(): + group = request.args.get("group_name") + + if not group: + flash("No group name provided") + elif 
current_user.remove_group(group): + flash(f"Successfully removed group {group}") + else: + flash("An error occurred removing group") - @server.app.route('/account/application/create', methods = ['POST']) - @login_required - def user_app_create(): - name = request.form.get("app_name") - url = request.form.get("app_url") - - if not name or not url: - flash("Must provide app name and redirect URL") - elif current_user.add_app(name, url): - flash("Successfully created app") - else: - flash("Could not create app") - - return redirect(url_for("user_settings") + "?section=application") + return redirect(url_for("user_settings") + "?section=tracker") + +@app.route('/account/repo/remove') +@login_required +def user_remove_repo(): + group = request.args.get("group_name") + repo = request.args.get("repo_id") + + if not repo: + flash("No repo id provided") + if not group: + flash("No group name provided") + repo = int(repo) + + result = current_user.remove_repo(db_session, group, repo)[0] + + if result: + flash(f"Successfully removed repo {repo} from group {group}") + else: + flash("An error occurred removing repo from group") + + return redirect(url_for("user_group_view") + f"?group={group}") + +@app.route('/account/application/deauthorize') +@login_required +def user_app_deauthorize(): + token = request.args.get("token") + + if not token: + flash("No application provided") + elif current_user.invalidate_session(token): + flash("Successfully deauthorized application") + else: + flash("Invalid application token") + + return redirect(url_for("user_settings") + "?section=application") + +@app.route('/account/application/create', methods = ['POST']) +@login_required +def user_app_create(): + name = request.form.get("app_name") + url = request.form.get("app_url") + + if not name or not url: + flash("Must provide app name and redirect URL") + elif current_user.add_app(name, url): + flash("Successfully created app") + else: + flash("Could not create app") - """ ---------------------------------------------------------------- - Locking request loop: - This route will lock the current request until the - report request completes. A json response is guaranteed. - Assumes that the requested repo exists. - """ - @server.app.route('/requests/report/wait/') - def wait_for_report_request(id): - requestReports(id) - return jsonify(report_requests[id]) + return redirect(url_for("user_settings") + "?section=application") + + +""" ---------------------------------------------------------------- +Locking request loop: + This route will lock the current request until the + report request completes. A json response is guaranteed. + Assumes that the requested repo exists. 
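The structural change running through these view and route files is the same one made in server.py: instead of a `create_routes(server)` function that closes over a `Server` instance, each module imports the shared `app` (and `db_session`) from `augur.api.server` and registers its handlers at import time, and server.py imports the route modules only after the app exists. A toy, two-file sketch of that layout, with module names chosen purely for illustration:

# myapi/server.py
from flask import Flask

app = Flask(__name__)
app.augur_api_version = "api/unstable"

# importing the route modules *after* app is constructed lets their
# @app.route decorators run against the shared application object
from myapi import routes  # noqa: E402,F401

# myapi/routes.py
from myapi.server import app

@app.route(f"/{app.augur_api_version}/status")
def status():
    return {"status": "OK"}

With this layout, importing myapi.server is enough to have the route registered; no explicit create_routes() call is needed.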
+""" +@app.route('/requests/report/wait/') +def wait_for_report_request(id): + requestReports(id) + return jsonify(report_requests[id]) diff --git a/augur/api/view/augur_view.py b/augur/api/view/augur_view.py index 7124067ce5..790b1f3f4f 100644 --- a/augur/api/view/augur_view.py +++ b/augur/api/view/augur_view.py @@ -5,8 +5,8 @@ from .init import logger # from .server import User +from ..server import app, db_session from augur.application.db.models import User, UserSessionToken -from augur.application.db.session import DatabaseSession from augur.api.routes import AUGUR_API_VERSION from augur.api.util import get_bearer_token @@ -14,100 +14,93 @@ login_manager = LoginManager() -def create_routes(server): +login_manager.init_app(app) - login_manager.init_app(server.app) +app.secret_key = getSetting("session_key") - server.app.secret_key = getSetting("session_key") +app.url_map.converters['list'] = ListConverter +app.url_map.converters['bool'] = BoolConverter +app.url_map.converters['json'] = JSONConverter - server.app.url_map.converters['list'] = ListConverter - server.app.url_map.converters['bool'] = BoolConverter - server.app.url_map.converters['json'] = JSONConverter +# Code 404 response page, for pages not found +@app.errorhandler(404) +def page_not_found(error): + if AUGUR_API_VERSION in str(request.url_rule): + return jsonify({"status": "Not Found"}), 404 - # Code 404 response page, for pages not found - @server.app.errorhandler(404) - def page_not_found(error): - if AUGUR_API_VERSION in str(request.url_rule): - return jsonify({"status": "Not Found"}), 404 + return render_template('index.j2', title='404', api_url=getSetting('serving')), 404 - return render_template('index.j2', title='404', api_url=getSetting('serving')), 404 +@app.errorhandler(405) +def unsupported_method(error): - @server.app.errorhandler(405) - def unsupported_method(error): + if AUGUR_API_VERSION in str(request.url_rule): + return jsonify({"status": "Unsupported method"}), 405 + + return render_message("405 - Method not supported", "The resource you are trying to access does not support the request method used"), 405 - if AUGUR_API_VERSION in str(request.url_rule): - return jsonify({"status": "Unsupported method"}), 405 - - return render_message("405 - Method not supported", "The resource you are trying to access does not support the request method used"), 405 - - @login_manager.unauthorized_handler - def unauthorized(): - - if AUGUR_API_VERSION in str(request.url_rule): +@login_manager.unauthorized_handler +def unauthorized(): - with DatabaseSession(logger) as db_session: + if AUGUR_API_VERSION in str(request.url_rule): - token_str = get_bearer_token() - token = db_session.query(UserSessionToken).filter(UserSessionToken.token == token_str).first() - if not token: - return jsonify({"status": "Session expired"}) + token_str = get_bearer_token() + token = db_session.query(UserSessionToken).filter(UserSessionToken.token == token_str).first() + if not token: + return jsonify({"status": "Session expired"}) - return jsonify({"status": "Login required"}) + return jsonify({"status": "Login required"}) - session["login_next"] = url_for(request.endpoint, **request.args) - return redirect(url_for('user_login')) + session["login_next"] = url_for(request.endpoint, **request.args) + return redirect(url_for('user_login')) - @login_manager.user_loader - def load_user(user_id): +@login_manager.user_loader +def load_user(user_id): - db_session = DatabaseSession(logger) - - user = User.get_user(db_session, user_id) - if not user: - 
return None + user = User.get_user(db_session, user_id) + if not user: + return None - groups = user.groups - tokens = user.tokens - applications = user.applications - for application in applications: - sessions = application.sessions - for group in groups: - repos = group.repos - for token in tokens: - application = token.application - db_session.expunge(user) + groups = user.groups + tokens = user.tokens + applications = user.applications + for application in applications: + sessions = application.sessions + for group in groups: + repos = group.repos + for token in tokens: + application = token.application + db_session.expunge(user) - # The flask_login library sets a unique session["_id"] - # when login_user() is called successfully - if session.get("_id") is not None: - - user._is_authenticated = True - user._is_active = True + # The flask_login library sets a unique session["_id"] + # when login_user() is called successfully + if session.get("_id") is not None: + + user._is_authenticated = True + user._is_active = True - return user + return user - @login_manager.request_loader - def load_user_request(request): +@login_manager.request_loader +def load_user_request(request): - print(f"Current time of user request: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}") - token = get_bearer_token() - session = DatabaseSession(logger) + print(f"Current time of user request: {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))}") + token = get_bearer_token() - current_time = int(time.time()) - token = session.query(UserSessionToken).filter(UserSessionToken.token == token, UserSessionToken.expiration >= current_time).first() - if token: + current_time = int(time.time()) + token = db_session.query(UserSessionToken).filter(UserSessionToken.token == token, UserSessionToken.expiration >= current_time).first() + if token: - print("Valid user") + print("Valid user") - user = token.user - user._is_authenticated = True - user._is_active = True + user = token.user + user._is_authenticated = True + user._is_active = True - return user - - return None + return user + + return None - @server.app.template_filter('as_datetime') - def as_datetime(seconds): - time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(seconds)) \ No newline at end of file +@app.template_filter('as_datetime') +def as_datetime(seconds): + time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(seconds)) \ No newline at end of file diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index fd6b32b755..de8b9a10ce 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -6,327 +6,319 @@ from augur.application.db.models import User, Repo, ClientApplication from .server import LoginException -from augur.application.db.session import DatabaseSession from augur.tasks.init.redis_connection import redis_connection as redis from augur.application.util import * from augur.application.config import AugurConfig +from ..server import app, db_session logger = logging.getLogger(__name__) # ROUTES ----------------------------------------------------------------------- -def create_routes(server): - """ ---------------------------------------------------------------- - root: - This route returns a redirect to the application root, appended - by the provided path, if any. 
- """ - @server.app.route('/root/') - @server.app.route('/root/') - def root(path=""): - return redirect(getSetting("approot") + path) - - """ ---------------------------------------------------------------- - logo: - this route returns a redirect to the application logo associated - with the provided brand, otherwise the inverted Augur logo if no - brand is provided. - """ - @server.app.route('/logo/') - @server.app.route('/logo/') - def logo(brand=None): - if brand is None: - return redirect(url_for('static', filename='img/augur_logo.png')) - elif "augur" in brand: - return logo(None) - elif "chaoss" in brand: - return redirect(url_for('static', filename='img/Chaoss_Logo_white.png')) - return "" - - """ ---------------------------------------------------------------- - default: - table: - This route returns the default view of the application, which - is currently defined as the repository table view - """ - @server.app.route('/') - @server.app.route('/repos/views/table') - def repo_table_view(): - query = request.args.get('q') - try: - page = int(request.args.get('p') or 0) - except: - page = 1 - - sorting = request.args.get('s') - rev = request.args.get('r') - - if rev is not None: - if rev == "False": - rev = False - elif rev == "True": - rev = True - - direction = "DESC" if rev else "ASC" - - with DatabaseSession(logger) as db_session: - config = AugurConfig(logger, db_session) +""" ---------------------------------------------------------------- +root: + This route returns a redirect to the application root, appended + by the provided path, if any. +""" +@app.route('/root/') +@app.route('/root/') +def root(path=""): + return redirect(getSetting("approot") + path) + +""" ---------------------------------------------------------------- +logo: + this route returns a redirect to the application logo associated + with the provided brand, otherwise the inverted Augur logo if no + brand is provided. 
+""" +@app.route('/logo/') +@app.route('/logo/') +def logo(brand=None): + if brand is None: + return redirect(url_for('static', filename='img/augur_logo.png')) + elif "augur" in brand: + return logo(None) + elif "chaoss" in brand: + return redirect(url_for('static', filename='img/Chaoss_Logo_white.png')) + return "" + +""" ---------------------------------------------------------------- +default: +table: + This route returns the default view of the application, which + is currently defined as the repository table view +""" +@app.route('/') +@app.route('/repos/views/table') +def repo_table_view(): + query = request.args.get('q') + try: + page = int(request.args.get('p') or 0) + except: + page = 1 + + sorting = request.args.get('s') + rev = request.args.get('r') + + if rev is not None: + if rev == "False": + rev = False + elif rev == "True": + rev = True - pagination_offset = config.get_value("frontend", "pagination_offset") - - - if current_user.is_authenticated: - data = current_user.get_repos(page = page, sort = sorting, direction = direction)[0] - page_count = (current_user.get_repo_count()[0] or 0) // pagination_offset - else: - data = get_all_repos(page = page, sort = sorting, direction = direction)[0] - page_count = (get_all_repos_count()[0] or 0) // pagination_offset - - #if not cacheFileExists("repos.json"): - # return renderLoading("repos/views/table", query, "repos.json") - - # return renderRepos("table", query, data, sorting, rev, page, True) - return render_module("repos-table", title="Repos", repos=data, query_key=query, activePage=page, pages=page_count, offset=pagination_offset, PS="repo_table_view", reverse = rev, sorting = sorting) - - """ ---------------------------------------------------------------- - card: - This route returns the repository card view - """ - @server.app.route('/repos/views/card') - def repo_card_view(): - query = request.args.get('q') - if current_user.is_authenticated: - count = current_user.get_repo_count()[0] - data = current_user.get_repos(page_size = count)[0] - else: - count = get_all_repos_count()[0] - data = get_all_repos(page_size=count)[0] - - return renderRepos("card", query, data, filter = True) - - """ ---------------------------------------------------------------- - groups: - This route returns the groups table view, listing all the current - groups in the backend - """ - # @server.app.route('/groups') - # @server.app.route('/groups/') - # def repo_groups_view(group=None): - # query = request.args.get('q') - # page = request.args.get('p') - - # if(group is not None): - # query = group - - # if(query is not None): - # buffer = [] - # data = requestJson("repos") - # for repo in data: - # if query == str(repo["repo_group_id"]) or query in repo["rg_name"]: - # buffer.append(repo) - # return renderRepos("table", query, buffer, page = page, pageSource = "repo_groups_view") - # else: - # groups = requestJson("repo-groups") - # return render_template('index.html', body="groups-table", title="Groups", groups=groups, query_key=query, api_url=getSetting('serving')) - - """ ---------------------------------------------------------------- - status: - This route returns the status view, which displays information - about the current status of collection in the backend - """ - @server.app.route('/status') - def status_view(): - return render_module("status", title="Status") - - """ ---------------------------------------------------------------- - login: - Under development - """ - @server.app.route('/account/login', methods=['GET', 'POST']) - def 
user_login(): - if request.method == 'POST': - try: - username = request.form.get('username') - remember = request.form.get('remember') is not None - password = request.form.get('password') - register = request.form.get('register') - - if username is None: - raise LoginException("A login issue occurred") - - with DatabaseSession(logger) as db_session: - user = User.get_user(db_session, username) + direction = "DESC" if rev else "ASC" - if not user and register is None: - raise LoginException("Invalid login credentials") - - # register a user - if register is not None: - if user: - raise LoginException("User already exists") - - email = request.form.get('email') - first_name = request.form.get('first_name') - last_name = request.form.get('last_name') - admin = request.form.get('admin') or False - - result = User.create_user(username, password, email, first_name, last_name, admin) - if not result[0]: - raise LoginException("An error occurred registering your account") - else: - user = User.get_user(db_session, username) - flash(result[1]["status"]) - - # Log the user in if the password is valid - if user.validate(password) and login_user(user, remember = remember): - flash(f"Welcome, {username}!") - if "login_next" in session: - return redirect(session.pop("login_next")) - return redirect(url_for('root')) - else: - print("Invalid login") - raise LoginException("Invalid login credentials") - except LoginException as e: - flash(str(e)) - return render_module('login', title="Login") - - """ ---------------------------------------------------------------- - logout: - Under development - """ - @server.app.route('/account/logout') - @login_required - def user_logout(): - logout_user() - flash("You have been logged out") - return redirect(url_for('root')) - - """ ---------------------------------------------------------------- - default: - table: - This route performs external authorization for a user - """ - @server.app.route('/user/authorize') - @login_required - def authorize_user(): - client_id = request.args.get("client_id") - state = request.args.get("state") - response_type = request.args.get("response_type") - - if not client_id or response_type != "code": - return render_message("Invalid Request", "Something went wrong. You may need to return to the previous application and make the request again.") - - with DatabaseSession(logger) as session: - - # TODO get application from client id - client = ClientApplication.get_by_id(session, client_id) - - return render_module("authorization", app = client, state = state) - - @server.app.route('/account/delete') - @login_required - def user_delete(): - if current_user.delete()[0]: - flash(f"Account {current_user.login_name} successfully removed") - logout_user() - else: - flash("An error occurred removing the account") - - return redirect(url_for("root")) - - """ ---------------------------------------------------------------- - settings: - Under development - """ - @server.app.route('/account/settings') - @login_required - def user_settings(): - return render_template("settings.j2") - - """ ---------------------------------------------------------------- - report page: - This route returns a report view of the requested repo (by ID). 
- """ - @server.app.route('/repos/views/repo/') - def repo_repo_view(id): - # For some reason, there is no reports definition (shouldn't be possible) - if reports is None: - return render_message("Report Definitions Missing", "You requested a report for a repo on this instance, but a definition for the report layout was not found.") - - with DatabaseSession(logger) as db_session: - - repo = Repo.get_by_id(db_session, id) - - return render_module("repo-info", reports=reports.keys(), images=reports, title="Repo", repo=repo, repo_id=id) - - """ ---------------------------------------------------------------- - default: - table: - This route returns the default view of the application, which - is currently defined as the repository table view - """ - @server.app.route('/user/group/') - @login_required - def user_group_view(): - group = request.args.get("group") - - if not group: - return render_message("No Group Specified", "You must specify a group to view this page.") - - params = {} + config = AugurConfig(logger, db_session) + pagination_offset = config.get_value("frontend", "pagination_offset") + + + if current_user.is_authenticated: + data = current_user.get_repos(page = page, sort = sorting, direction = direction)[0] + page_count = (current_user.get_repo_count()[0] or 0) // pagination_offset + else: + data = get_all_repos(page = page, sort = sorting, direction = direction)[0] + page_count = (get_all_repos_count()[0] or 0) // pagination_offset + + #if not cacheFileExists("repos.json"): + # return renderLoading("repos/views/table", query, "repos.json") + + # return renderRepos("table", query, data, sorting, rev, page, True) + return render_module("repos-table", title="Repos", repos=data, query_key=query, activePage=page, pages=page_count, offset=pagination_offset, PS="repo_table_view", reverse = rev, sorting = sorting) + +""" ---------------------------------------------------------------- +card: + This route returns the repository card view +""" +@app.route('/repos/views/card') +def repo_card_view(): + query = request.args.get('q') + if current_user.is_authenticated: + count = current_user.get_repo_count()[0] + data = current_user.get_repos(page_size = count)[0] + else: + count = get_all_repos_count()[0] + data = get_all_repos(page_size=count)[0] + + return renderRepos("card", query, data, filter = True) + +""" ---------------------------------------------------------------- +groups: + This route returns the groups table view, listing all the current + groups in the backend +""" +# @app.route('/groups') +# @app.route('/groups/') +# def repo_groups_view(group=None): +# query = request.args.get('q') +# page = request.args.get('p') + +# if(group is not None): +# query = group + +# if(query is not None): +# buffer = [] +# data = requestJson("repos") +# for repo in data: +# if query == str(repo["repo_group_id"]) or query in repo["rg_name"]: +# buffer.append(repo) +# return renderRepos("table", query, buffer, page = page, pageSource = "repo_groups_view") +# else: +# groups = requestJson("repo-groups") +# return render_template('index.html', body="groups-table", title="Groups", groups=groups, query_key=query, api_url=getSetting('serving')) + +""" ---------------------------------------------------------------- +status: + This route returns the status view, which displays information + about the current status of collection in the backend +""" +@app.route('/status') +def status_view(): + return render_module("status", title="Status") + +""" 
---------------------------------------------------------------- +login: + Under development +""" +@app.route('/account/login', methods=['GET', 'POST']) +def user_login(): + if request.method == 'POST': try: - params["page"] = int(request.args.get('p') or 0) - except: - params["page"] = 1 - - if sort := request.args.get('s'): - params["sort"] = sort - - rev = request.args.get('r') - if rev is not None: - if rev == "False": - rev = False - params["direction"] = "ASC" - elif rev == "True": - rev = True - params["direction"] = "DESC" - - with DatabaseSession(logger) as db_session: - config = AugurConfig(logger, db_session) - - pagination_offset = config.get_value("frontend", "pagination_offset") - - data = current_user.get_group_repos(group, **params)[0] - page_count = (current_user.get_group_repo_count(group)[0]) or 0 - page_count //= pagination_offset - - if not data: - return render_message("Error Loading Group", "Either the group you requested does not exist, the group has no repos, or an unspecified error occurred.") - - #if not cacheFileExists("repos.json"): - # return renderLoading("repos/views/table", query, "repos.json") - - # return renderRepos("table", None, data, sort, rev, params.get("page"), True) - return render_module("user-group-repos-table", title="Repos", repos=data, query_key=None, activePage=params["page"], pages=page_count, offset=pagination_offset, PS="user_group_view", reverse = rev, sorting = params.get("sort"), group=group) - - """ ---------------------------------------------------------------- - Admin dashboard: - View the admin dashboard. - """ - @server.app.route('/dashboard') - def dashboard_view(): - empty = [ - { "title": "Placeholder", "settings": [ - { "id": "empty", - "display_name": "Empty Entry", - "value": "NULL", - "description": "There's nothing here 👻" - } - ]} - ] - - backend_config = requestJson("config/get", False) - - return render_template('admin-dashboard.j2', sections = empty, config = backend_config) + username = request.form.get('username') + remember = request.form.get('remember') is not None + password = request.form.get('password') + register = request.form.get('register') + + if username is None: + raise LoginException("A login issue occurred") + + user = User.get_user(db_session, username) + + if not user and register is None: + raise LoginException("Invalid login credentials") + + # register a user + if register is not None: + if user: + raise LoginException("User already exists") + + email = request.form.get('email') + first_name = request.form.get('first_name') + last_name = request.form.get('last_name') + admin = request.form.get('admin') or False + + result = User.create_user(username, password, email, first_name, last_name, admin) + if not result[0]: + raise LoginException("An error occurred registering your account") + else: + user = User.get_user(db_session, username) + flash(result[1]["status"]) + + # Log the user in if the password is valid + if user.validate(password) and login_user(user, remember = remember): + flash(f"Welcome, {username}!") + if "login_next" in session: + return redirect(session.pop("login_next")) + return redirect(url_for('root')) + else: + print("Invalid login") + raise LoginException("Invalid login credentials") + except LoginException as e: + flash(str(e)) + return render_module('login', title="Login") + +""" ---------------------------------------------------------------- +logout: + Under development +""" +@app.route('/account/logout') +@login_required +def user_logout(): + logout_user() + flash("You 
have been logged out") + return redirect(url_for('root')) + +""" ---------------------------------------------------------------- +default: +table: + This route performs external authorization for a user +""" +@app.route('/user/authorize') +@login_required +def authorize_user(): + client_id = request.args.get("client_id") + state = request.args.get("state") + response_type = request.args.get("response_type") + + if not client_id or response_type != "code": + return render_message("Invalid Request", "Something went wrong. You may need to return to the previous application and make the request again.") + + # TODO get application from client id + client = ClientApplication.get_by_id(db_session, client_id) + + return render_module("authorization", app = client, state = state) + +@app.route('/account/delete') +@login_required +def user_delete(): + if current_user.delete()[0]: + flash(f"Account {current_user.login_name} successfully removed") + logout_user() + else: + flash("An error occurred removing the account") + + return redirect(url_for("root")) + +""" ---------------------------------------------------------------- +settings: + Under development +""" +@app.route('/account/settings') +@login_required +def user_settings(): + return render_template("settings.j2") + +""" ---------------------------------------------------------------- +report page: + This route returns a report view of the requested repo (by ID). +""" +@app.route('/repos/views/repo/') +def repo_repo_view(id): + # For some reason, there is no reports definition (shouldn't be possible) + if reports is None: + return render_message("Report Definitions Missing", "You requested a report for a repo on this instance, but a definition for the report layout was not found.") + + repo = Repo.get_by_id(db_session, id) + + return render_module("repo-info", reports=reports.keys(), images=reports, title="Repo", repo=repo, repo_id=id) + +""" ---------------------------------------------------------------- +default: +table: + This route returns the default view of the application, which + is currently defined as the repository table view +""" +@app.route('/user/group/') +@login_required +def user_group_view(): + group = request.args.get("group") + + if not group: + return render_message("No Group Specified", "You must specify a group to view this page.") + + params = {} + + try: + params["page"] = int(request.args.get('p') or 0) + except: + params["page"] = 1 + + if sort := request.args.get('s'): + params["sort"] = sort + + rev = request.args.get('r') + if rev is not None: + if rev == "False": + rev = False + params["direction"] = "ASC" + elif rev == "True": + rev = True + params["direction"] = "DESC" + + config = AugurConfig(logger, db_session) + + pagination_offset = config.get_value("frontend", "pagination_offset") + + data = current_user.get_group_repos(group, **params)[0] + page_count = (current_user.get_group_repo_count(group)[0]) or 0 + page_count //= pagination_offset + + if not data: + return render_message("Error Loading Group", "Either the group you requested does not exist, the group has no repos, or an unspecified error occurred.") + + #if not cacheFileExists("repos.json"): + # return renderLoading("repos/views/table", query, "repos.json") + + # return renderRepos("table", None, data, sort, rev, params.get("page"), True) + return render_module("user-group-repos-table", title="Repos", repos=data, query_key=None, activePage=params["page"], pages=page_count, offset=pagination_offset, PS="user_group_view", reverse = rev, sorting = 
params.get("sort"), group=group) + +""" ---------------------------------------------------------------- +Admin dashboard: + View the admin dashboard. +""" +@app.route('/dashboard') +def dashboard_view(): + empty = [ + { "title": "Placeholder", "settings": [ + { "id": "empty", + "display_name": "Empty Entry", + "value": "NULL", + "description": "There's nothing here 👻" + } + ]} + ] + + backend_config = requestJson("config/get", False) + + return render_template('admin-dashboard.j2', sections = empty, config = backend_config) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index a3c23f4959..79478f2dfa 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -70,16 +70,14 @@ def start(disable_collection, development, port): db_session.invalidate() - gunicorn_command = f"gunicorn -c {gunicorn_location} -b {host}:{port} --preload augur.api.server:app" + gunicorn_command = f"gunicorn -c {gunicorn_location} -b {host}:{port} augur.api.server:app" server = subprocess.Popen(gunicorn_command.split(" ")) time.sleep(3) logger.info('Gunicorn webserver started...') logger.info(f'Augur is running at: http://127.0.0.1:{port}') - worker_1_process = None - cpu_worker_process = None - celery_beat_process = None + worker_process = None if not disable_collection: if os.path.exists("celerybeat-schedule.db"): @@ -111,10 +109,6 @@ def start(disable_collection, development, port): logger.info("Shutting down celery process") worker_process.terminate() - if celery_beat_process: - logger.info("Shutting down celery beat process") - celery_beat_process.terminate() - try: clear_redis_caches() connection_string = "" diff --git a/metadata.py b/metadata.py index 60ff7b1a50..525dffaad1 100644 --- a/metadata.py +++ b/metadata.py @@ -5,8 +5,8 @@ __short_description__ = "Python 3 package for free/libre and open-source software community metrics, models & data collection" -__version__ = "0.43.10" -__release__ = "v0.43.10 (Louise)" +__version__ = "0.44.0" +__release__ = "v0.44.0 (Brussel Sprouts)" __license__ = "MIT" __copyright__ = "University of Missouri, University of Nebraska-Omaha, CHAOSS, Brian Warner & Augurlabs 2023" diff --git a/tests/test_routes/test_api_functionality/test_commit_routes_api.py b/tests/test_routes/test_api_functionality/test_commit_routes_api.py index 1ec9ce69f5..9a0d1ccc91 100644 --- a/tests/test_routes/test_api_functionality/test_commit_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_commit_routes_api.py @@ -7,53 +7,53 @@ def test_annual_commit_count_ranked_by_new_repo_in_repo_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-new-repo-in-repo-group/') assert response is not None - data = response.json() + assert response.status_code == 200 def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-commit-count-ranked-by-new-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def test_annual_commit_count_ranked_by_new_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-new-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def 
test_annual_commit_count_ranked_by_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-commit-count-ranked-by-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def test_annual_commit_count_ranked_by_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-commit-count-ranked-by-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def test_top_committers_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/top-committers') assert response is not None - data = response.json() + assert response.status_code == 200 def test_top_committers_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/top-committers') assert response is not None - data = response.json() + assert response.status_code == 200 def test_committer_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/committers') assert response is not None - data = response.json() + assert response.status_code == 200 def test_committer_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/committers?period=year') assert response is not None - data = response.json() + assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_contributor_routes_api.py b/tests/test_routes/test_api_functionality/test_contributor_routes_api.py index 6a01f9bdfd..b34931d911 100644 --- a/tests/test_routes/test_api_functionality/test_contributor_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_contributor_routes_api.py @@ -7,24 +7,24 @@ def test_contributors_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/contributors') assert response is not None - data = response.json() + assert response.status_code == 200 def test_contributors_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/contributors') assert response is not None - data = response.json() + assert response.status_code == 200 def test_contributors_new_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/contributors-new') assert response is not None - data = response.json() + assert response.status_code == 200 def test_contributors_new_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/contributors-new') assert response is not None - data = response.json() + assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_issue_routes_api.py b/tests/test_routes/test_api_functionality/test_issue_routes_api.py index 280e12d386..619e545544 100644 --- a/tests/test_routes/test_api_functionality/test_issue_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_issue_routes_api.py @@ -8,203 +8,202 @@ def test_issues_new_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-new') assert response is not None - data = 
response.json() assert response.status_code == 200 def test_issues_new_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-new') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_active_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-active') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_active_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-active') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_closed_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-closed') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_closed_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-closed') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_duration_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-duration') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_duration_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-duration') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_participants_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-participants') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_participants_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-participants') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_throughput_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-throughput') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_throughput_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-throughput') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_backlog_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-backlog') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_backlog_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-backlog') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_first_time_opened_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-first-time-opened') assert response is not None - data = 
response.json() + assert response.status_code == 200 def test_issues_first_time_opened_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-first-time-opened') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_first_time_closed_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/20/issues-first-time-closed') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_first_time_closed_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/20/repos/25430/issues-first-time-closed') assert response is not None - data = response.json() + assert response.status_code == 200 def test_open_issues_count_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/open-issues-count') assert response is not None - data = response.json() + assert response.status_code == 200 def test_open_issues_count_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/open-issues-count') assert response is not None - data = response.json() + assert response.status_code == 200 def test_closed_issues_count_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/closed-issues-count') assert response is not None - data = response.json() + assert response.status_code == 200 def test_closed_issues_count_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/closed-issues-count') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_open_age_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-open-age/') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_open_age_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-open-age/') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_closed_resolution_duration_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-closed-resolution-duration/') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_closed_resolution_duration_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-closed-resolution-duration/') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_maintainer_response_duration_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issues-maintainer-response-duration/') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issues_maintainer_response_duration_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issues-maintainer-response-duration/') assert response is not None - data = response.json() + 
assert response.status_code == 200 def test_average_issue_resolution_time_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/average-issue-resolution-time') assert response is not None - data = response.json() + assert response.status_code == 200 def test_average_issue_resolution_time_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/average-issue-resolution-time') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_comments_mean_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-comments-mean') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_comments_mean_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-comments-mean') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_comments_mean_std_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/issue-comments-mean-std') assert response is not None - data = response.json() + assert response.status_code == 200 def test_issue_comments_mean_std_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/issue-comments-mean-std') assert response is not None - data = response.json() + assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py b/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py index fef8e3498c..133761d6f0 100644 --- a/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_pull_request_routes_api.py @@ -8,18 +8,18 @@ def test_pull_requests_merge_contributor_new_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/pull-requests-merge-contributor-new') assert response is not None - data = response.json() + assert response.status_code == 200 def test_pull_requests_merge_contributor_new_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/pull-requests-merge-contributor-new') assert response is not None - data = response.json() + assert response.status_code == 200 def test_pull_requests_closed_no_merge_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repos/25430/pull-requests-closed-no-merge') assert response is not None - data = response.json() + assert response.status_code == 200 diff --git a/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py b/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py index 9d8303109a..8e45ee91eb 100644 --- a/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py +++ b/tests/test_routes/test_api_functionality/test_repo_meta_routes_api.py @@ -8,43 +8,43 @@ def test_code_changes_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/code-changes') assert response is not None - data = response.json() + assert response.status_code == 200 def test_code_changes_by_repo_api_is_functional(): response = 
requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/code-changes') assert response is not None - data = response.json() + assert response.status_code == 200 def test_code_changes_lines_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/code-changes-lines') assert response is not None - data = response.json() + assert response.status_code == 200 def test_code_changes_lines_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/code-changes-lines') assert response is not None - data = response.json() + assert response.status_code == 200 def test_sub_projects_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/sub-projects') assert response is not None - data = response.json() + assert response.status_code == 200 def test_sub_projects_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/sub-projects') assert response is not None - data = response.json() + assert response.status_code == 200 def test_cii_best_practices_badge_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/cii-best-practices-badge') assert response is not None - data = response.json() + assert response.status_code == 200 def test_languages_by_group_api_is_functional(): @@ -57,60 +57,60 @@ def test_languages_by_repo_api_is_functional(): # def test_license_declared_by_group_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-declared') -# data = response.json() +# # assert response.status_code == 200 # # def test_license_declared_by_repo_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-declared') -# data = response.json() +# # assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-lines-of-code-count-ranked-by-new-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_new_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-lines-of-code-count-ranked-by-new-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_repo_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/annual-lines-of-code-count-ranked-by-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 def test_annual_lines_of_code_count_ranked_by_repo_in_repo_group_by_group_api_is_functional(): response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/annual-lines-of-code-count-ranked-by-repo-in-repo-group') assert response is not None - data = response.json() + assert response.status_code == 200 # def test_license_coverage_by_group_api_is_functional(): # response = 
requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-coverage') assert response is not None -# data = response.json() +# # assert response.status_code == 200 # def test_license_coverage_by_repo_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-coverage') assert response is not None -# data = response.json() +# # assert response.status_code == 200 # def test_license_count_by_group_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/license-count') assert response is not None -# data = response.json() +# # assert response.status_code == 200 # def test_license_count_by_repo_api_is_functional(): # response = requests.get(f'http://localhost:{server_port}/api/unstable/repo-groups/10/repos/25430/license-count') assert response is not None -# data = response.json() +# # assert response.status_code == 200 From ea982ebd2d31d35d0e2ba94abb4f6d27b21a5a33 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Tue, 7 Feb 2023 11:23:07 -0600 Subject: [PATCH 098/134] Add fields to collection status table Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- .../application/db/models/augur_operations.py | 9 ++++--- .../versions/5_add_collection_status_table.py | 9 ++++--- augur/tasks/start_tasks.py | 26 +++++++++---------- 3 files changed, 25 insertions(+), 19 deletions(-) diff --git a/augur/application/db/models/augur_operations.py b/augur/application/db/models/augur_operations.py index b0b05fb9cd..ab7c7d45cd 100644 --- a/augur/application/db/models/augur_operations.py +++ b/augur/application/db/models/augur_operations.py @@ -925,10 +925,13 @@ class CollectionStatus(Base): ) repo_id = Column(ForeignKey("augur_data.repo.repo_id", name="collection_status_repo_id_fk"), primary_key=True) - data_last_collected = Column(TIMESTAMP) + core_data_last_collected = Column(TIMESTAMP) + core_status = Column(String, nullable=False, server_default=text("'Pending'")) + core_task_id = Column(String) + secondary_status = Column(String, nullable=False, server_default=text("'Pending'")) + secondary_data_last_collected = Column(TIMESTAMP) + secondary_task_id = Column(String) event_last_collected = Column(TIMESTAMP) - status = Column(String, nullable=False, server_default=text("'Pending'")) - task_id = Column(String) repo = relationship("Repo", back_populates="collection_status") diff --git a/augur/application/schema/alembic/versions/5_add_collection_status_table.py b/augur/application/schema/alembic/versions/5_add_collection_status_table.py index 13f7f5f61e..a138a0e3e2 100644 --- a/augur/application/schema/alembic/versions/5_add_collection_status_table.py +++ b/augur/application/schema/alembic/versions/5_add_collection_status_table.py @@ -22,10 +22,13 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.create_table('collection_status', sa.Column('repo_id', sa.BigInteger(), nullable=False), - sa.Column('data_last_collected', postgresql.TIMESTAMP(), nullable=True), + sa.Column('core_data_last_collected', postgresql.TIMESTAMP(), nullable=True), + sa.Column('core_status', sa.String(), server_default=sa.text("'Pending'"), nullable=False), + sa.Column('core_task_id', sa.String(), nullable=True), + sa.Column('secondary_data_last_collected', postgresql.TIMESTAMP(), nullable=True), + sa.Column('secondary_status', sa.String(), server_default=sa.text("'Pending'"), nullable=False), + sa.Column('secondary_task_id', sa.String(), nullable=True), sa.Column('event_last_collected', postgresql.TIMESTAMP(), nullable=True), - sa.Column('status', sa.String(), server_default=sa.text("'Pending'"), nullable=False), - sa.Column('task_id', sa.String(), nullable=True), sa.ForeignKeyConstraint(['repo_id'], ['augur_data.repo.repo_id'], name='collection_status_repo_id_fk'), sa.PrimaryKeyConstraint('repo_id'), schema='augur_operations' diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index f01bbc4dff..70a9ff926b 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -75,9 +75,9 @@ def task_success(repo_git): collection_status = repo.collection_status[0] - collection_status.status = CollectionState.SUCCESS.value - collection_status.data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') - collection_status.task_id = None + collection_status.core_status = CollectionState.SUCCESS.value + collection_status.core_data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + collection_status.core_task_id = None session.commit() @@ -89,7 +89,7 @@ def task_failed(request,exc,traceback): logger = logging.getLogger(task_failed.__name__) with DatabaseSession(logger,engine) as session: - query = session.query(CollectionStatus).filter(CollectionStatus.task_id == request.id) + query = session.query(CollectionStatus).filter(CollectionStatus.core_task_id == request.id) collectionRecord = execute_session_query(query,'one') @@ -103,9 +103,9 @@ def task_failed(request,exc,traceback): except Exception as e: logger.error(f"Could not mutate request chain! 
\n Error: {e}") - if collectionRecord.status == CollectionState.COLLECTING.value: + if collectionRecord.core_status == CollectionState.COLLECTING.value: # set status to Error in db - collectionRecord.status = CollectionStatus.ERROR + collectionRecord.core_status = CollectionStatus.ERROR session.commit() # log traceback to error file @@ -243,8 +243,8 @@ def start_data_collection(self): #set status in database to collecting repoStatus = repo.collection_status[0] - repoStatus.task_id = task_id - repoStatus.status = CollectionState.COLLECTING.value + repoStatus.core_task_id = task_id + repoStatus.core_status = CollectionState.COLLECTING.value self.session.commit() @celery.task @@ -306,13 +306,13 @@ def augur_collection_monitor(): enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] enabled_phases = [phase for phase in DEFINED_PHASES_PER_REPO if phase.__name__ in enabled_phase_names] - active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value).all()) + active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.core_status == CollectionState.COLLECTING.value).all()) cutoff_date = datetime.datetime.now() - datetime.timedelta(days=days) - not_erroed = CollectionStatus.status != str(CollectionState.ERROR.value) - not_collecting = CollectionStatus.status != str(CollectionState.COLLECTING.value) - never_collected = CollectionStatus.data_last_collected == None - old_collection = CollectionStatus.data_last_collected <= cutoff_date + not_erroed = CollectionStatus.core_status != str(CollectionState.ERROR.value) + not_collecting = CollectionStatus.core_status != str(CollectionState.COLLECTING.value) + never_collected = CollectionStatus.core_data_last_collected == None + old_collection = CollectionStatus.core_data_last_collected <= cutoff_date limit = max_repo_count-active_repo_count From 93890abebf23fa0e1d019667913ba642edc76092 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Tue, 7 Feb 2023 11:45:16 -0600 Subject: [PATCH 099/134] initial changes Signed-off-by: Isaac Milarsky --- augur/application/config.py | 2 +- augur/tasks/start_tasks.py | 108 +++++++++++++++++++----------------- 2 files changed, 58 insertions(+), 52 deletions(-) diff --git a/augur/application/config.py b/augur/application/config.py index 60c1c1acc0..407caa57ef 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -100,7 +100,7 @@ def get_development_flag(): }, "Task_Routine": { "prelim_phase": 1, - "repo_collect_phase": 1, + "primary_repo_collect_phase": 1, "machine_learning_phase": 0 } } diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index f01bbc4dff..f9a68694c5 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -119,20 +119,12 @@ def task_failed(request,exc,traceback): def prelim_phase(repo_git): logger = logging.getLogger(prelim_phase.__name__) - job = None - with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - job = detect_github_repo_move.si(repo_obj.repo_git) - - - - return job + return detect_github_repo_move.si(repo_git) -def repo_collect_phase(repo_git): - logger = logging.getLogger(repo_collect_phase.__name__) +def primary_repo_collect_phase(repo_git): + logger = logging.getLogger(primary_repo_collect_phase.__name__) #Here the term issues also includes prs. 
This list is a bunch of chains that run in parallel to process issue data. issue_dependent_tasks = [] @@ -142,36 +134,38 @@ def repo_collect_phase(repo_git): np_clustered_array = [] #A chain is needed for each repo. - with DatabaseEngine() as engine, DatabaseSession(logger, engine) as session: - query = session.query(Repo).filter(Repo.repo_git == repo_git) - repo_obj = execute_session_query(query, 'one') - repo_git = repo_obj.repo_git + repo_info_task = collect_repo_info.si(repo_git)#collection_task_wrapper(self) - repo_info_task = collect_repo_info.si(repo_git)#collection_task_wrapper(self) + primary_repo_jobs = group( + collect_issues.si(repo_git), + collect_pull_requests.si(repo_git) + ) - primary_repo_jobs = group( - collect_issues.si(repo_git), - collect_pull_requests.si(repo_git) - ) + secondary_repo_jobs = group( + collect_events.si(repo_git),#*create_grouped_task_load(dataList=first_pass, task=collect_events).tasks, + collect_github_messages.si(repo_git),#*create_grouped_task_load(dataList=first_pass,task=collect_github_messages).tasks, + ) - secondary_repo_jobs = group( - collect_events.si(repo_git),#*create_grouped_task_load(dataList=first_pass, task=collect_events).tasks, - collect_github_messages.si(repo_git),#*create_grouped_task_load(dataList=first_pass,task=collect_github_messages).tasks, - process_pull_request_files.si(repo_git),#*create_grouped_task_load(dataList=first_pass, task=process_pull_request_files).tasks, - process_pull_request_commits.si(repo_git)#*create_grouped_task_load(dataList=first_pass, task=process_pull_request_commits).tasks - ) + repo_task_group = group( + repo_info_task, + chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), + generate_facade_chain(logger,repo_git), + collect_releases.si(repo_git) + ) - repo_task_group = group( - repo_info_task, - chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), - chain(generate_facade_chain(logger,repo_git),process_dependency_metrics.si(repo_git)), - collect_releases.si(repo_git) - ) + return repo_task_group - return repo_task_group +def secondary_repo_collect_phase(repo_git): + logger = logging.getLogger(secondary_repo_collect_phase.__name__) -DEFINED_PHASES_PER_REPO = [prelim_phase, repo_collect_phase] + repo_task_group = group( + process_pull_request_files.si(repo_git), + process_pull_request_commits.si(repo_git), + process_dependency_metrics.si(repo_git) + ) + + return repo_task_group class AugurTaskRoutine: @@ -181,11 +175,11 @@ class AugurTaskRoutine: Attributes: logger (Logger): Get logger from AugurLogger jobs_dict (dict): Dict of data collection phases to run - repos (List[int]): List of repo_ids to run collection on. + repos (List[str]): List of repo_ids to run collection on. collection_phases (List[str]): List of phases to run in augur collection. 
session: Database session to use """ - def __init__(self,session,repos: List[int]=[],collection_phases: List[str]=[]): + def __init__(self,session,repos: List[str]=[],collection_phases: List[str]=[]): self.logger = AugurLogger("data_collection_jobs").get_logger() #self.session = TaskSession(self.logger) self.jobs_dict = {} @@ -221,10 +215,10 @@ def start_data_collection(self): - for repo_id in self.repos: + for repo_git in self.repos: - repo = self.session.query(Repo).filter(Repo.repo_id == repo_id).one() - repo_git = repo.repo_git + repo = self.session.query(Repo).filter(Repo.repo_git == repo_git).one() + repo_id = repo.repo_id augur_collection_sequence = [] for phaseName, job in self.jobs_dict.items(): @@ -234,18 +228,15 @@ def start_data_collection(self): #The preliminary task creates the larger task chain augur_collection_sequence.append(job(repo_git)) - augur_collection_sequence.append(task_success.si(repo_git)) + #augur_collection_sequence.append(task_success.si(repo_git)) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) task_id = augur_collection_chain.apply_async(link_error=task_failed.s()).task_id - self.logger.info(f"Setting repo_id {repo_id} to collecting") + self.logger.info(f"Setting repo_id {repo_id} to collecting for repo: {repo_git}") - #set status in database to collecting - repoStatus = repo.collection_status[0] - repoStatus.task_id = task_id - repoStatus.status = CollectionState.COLLECTING.value - self.session.commit() + #yield the value of the task_id to the calling method so that the proper collectionStatus field can be updated + yield repo_git, task_id @celery.task def non_repo_domain_tasks(): @@ -277,7 +268,8 @@ def non_repo_domain_tasks(): enabled_tasks.append(machine_learning_phase.si()) tasks = chain( - *enabled_tasks + *enabled_tasks, + refresh_materialized_views.si() ) tasks.apply_async() @@ -293,6 +285,8 @@ def augur_collection_monitor(): logger.info("Checking for repos to collect") + coreCollection = [prelim_phase, primary_repo_collect_phase] + #Get phase options from the config with DatabaseSession(logger, engine) as session: @@ -304,7 +298,9 @@ def augur_collection_monitor(): #Get list of enabled phases enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] - enabled_phases = [phase for phase in DEFINED_PHASES_PER_REPO if phase.__name__ in enabled_phase_names] + enabled_phases = [phase for phase in coreCollection if phase.__name__ in enabled_phase_names] + #task success is scheduled no matter what the config says. 
+ enabled_phases.append(task_success) active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value).all()) @@ -318,12 +314,22 @@ def augur_collection_monitor(): repo_status_list = session.query(CollectionStatus).filter(and_(not_erroed, not_collecting, or_(never_collected, old_collection))).limit(limit).all() - repo_ids = [repo.repo_id for repo in repo_status_list] + repo_git_identifiers = [repo.repo_git for repo in repo_status_list] logger.info(f"Starting collection on {len(repo_ids)} repos") - augur_collection = AugurTaskRoutine(session,repos=repo_ids,collection_phases=enabled_phases) - augur_collection.start_data_collection() + augur_collection = AugurTaskRoutine(session,repos=repo_git_identifiers,collection_phases=enabled_phases) + + #Start data collection and update the collectionStatus with the task_ids + for repo_git, task_id in augur_collection.start_data_collection(): + + repo = session.query(Repo).filter(Repo.repo_git == repo_git).one() + + #set status in database to collecting + repoStatus = repo.collection_status[0] + repoStatus.task_id = task_id + repoStatus.status = CollectionState.COLLECTING.value + self.session.commit() From 5f53e4402463ba4faa65819ad92001ca012ea23b Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Tue, 7 Feb 2023 13:43:09 -0600 Subject: [PATCH 100/134] changes to collection seperating out pr files and commits Signed-off-by: Isaac Milarsky --- augur/application/config.py | 1 + augur/tasks/start_tasks.py | 57 +++++++++++++++++++++++++++++++------ 2 files changed, 50 insertions(+), 8 deletions(-) diff --git a/augur/application/config.py b/augur/application/config.py index 407caa57ef..b8c254bb36 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -101,6 +101,7 @@ def get_development_flag(): "Task_Routine": { "prelim_phase": 1, "primary_repo_collect_phase": 1, + "secondary_repo_collect_phase": 1, "machine_learning_phase": 0 } } diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index df18a0fde9..9d6412d280 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -59,13 +59,13 @@ def collection_task_wrapper(self,*args,**kwargs): """ @celery.task -def task_success(repo_git): +def core_task_success(repo_git): from augur.tasks.init.celery_app import engine - logger = logging.getLogger(task_success.__name__) + logger = logging.getLogger(core_task_success.__name__) - logger.info(f"Repo '{repo_git}' succeeded") + logger.info(f"Repo '{repo_git}' succeeded through core collection") with DatabaseSession(logger, engine) as session: @@ -79,6 +79,32 @@ def task_success(repo_git): collection_status.core_data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') collection_status.core_task_id = None + #TODO: remove when secondary tasks are changed to start elsewhere. 
+ collection_status.secondary_status = CollectionState.SUCCESS.value + + session.commit() + +@celery.task +def secondary_task_success(repo_git): + + from augur.tasks.init.celery_app import engine + + logger = logging.getLogger(secondary_task_success.__name__) + + logger.info(f"Repo '{repo_git}' succeeded through secondary collection") + + with DatabaseSession(logger, engine) as session: + + repo = Repo.get_by_repo_git(session, repo_git) + if not repo: + raise Exception(f"Task with repo_git of {repo_git} but could not be found in Repo table") + + collection_status = repo.collection_status[0] + + collection_status.secondary_status = CollectionState.SUCCESS.value + collection_status.secondary_data_last_collected = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') + collection_status.secondary_task_id = None + session.commit() @celery.task @@ -228,7 +254,7 @@ def start_data_collection(self): #The preliminary task creates the larger task chain augur_collection_sequence.append(job(repo_git)) - #augur_collection_sequence.append(task_success.si(repo_git)) + #augur_collection_sequence.append(core_task_success.si(repo_git)) #Link all phases in a chain and send to celery augur_collection_chain = chain(*augur_collection_sequence) task_id = augur_collection_chain.apply_async(link_error=task_failed.s()).task_id @@ -298,9 +324,23 @@ def augur_collection_monitor(): #Get list of enabled phases enabled_phase_names = [name for name, phase in phase_options.items() if phase == 1] - enabled_phases = [phase for phase in coreCollection if phase.__name__ in enabled_phase_names] + #enabled_phases = [phase for phase in coreCollection if phase.__name__ in enabled_phase_names] + + enabled_phases = [] + + #Primary jobs + if prelim_phase.__name__ in enabled_phase_names: + enabled_phases.append(prelim_phase) + + if primary_repo_collect_phase.__name__ in enabled_phase_names: + enabled_phases.append(primary_repo_collect_phase) + #task success is scheduled no matter what the config says. 
- enabled_phases.append(task_success) + enabled_phases.append(core_task_success) + + if secondary_repo_collect_phase.__name__ in enabled_phase_names: + enabled_phases.append(secondary_repo_collect_phase) + enabled_phases.append(secondary_task_success) active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.core_status == CollectionState.COLLECTING.value).all()) @@ -327,8 +367,9 @@ def augur_collection_monitor(): #set status in database to collecting repoStatus = repo.collection_status[0] - repoStatus.task_id = task_id - repoStatus.status = CollectionState.COLLECTING.value + repoStatus.core_task_id = task_id + repoStatus.secondary_task_id = task_id + repoStatus.core_status = CollectionState.COLLECTING.value self.session.commit() From c5b4330f5577a2205a6ad1f7f6a0f62146bb3c03 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Tue, 7 Feb 2023 14:14:57 -0600 Subject: [PATCH 101/134] got it to a running state Signed-off-by: Isaac Milarsky --- augur/application/cli/backend.py | 6 ++++-- augur/tasks/start_tasks.py | 22 ++++++++++++++++++---- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 766c546552..81bf30d202 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -113,7 +113,8 @@ def start(disable_collection, development, port): create_collection_status(logger) with DatabaseSession(logger) as session: - collection_status_list = session.query(CollectionStatus).filter(CollectionStatus.status == CollectionState.COLLECTING.value) + collection_status_list = session.query(CollectionStatus).filter(CollectionStatus.core_status == CollectionState.COLLECTING.value + or CollectionStatus.secondary_status == CollectionState.COLLECTING.value) for status in collection_status_list: repo = status.repo @@ -121,7 +122,8 @@ def start(disable_collection, development, port): repo.repo_path = None repo.repo_status = "New" - collection_status_list.update({CollectionStatus.status: "Pending"}) + collection_status_list.update({CollectionStatus.core_status: "Pending"}) + collection_status_list.update({CollectionStatus.secondary_status: "Pending"}) session.commit() augur_collection_monitor.si().apply_async() diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 9d6412d280..886988d481 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -256,6 +256,7 @@ def start_data_collection(self): #augur_collection_sequence.append(core_task_success.si(repo_git)) #Link all phases in a chain and send to celery + print(augur_collection_sequence) augur_collection_chain = chain(*augur_collection_sequence) task_id = augur_collection_chain.apply_async(link_error=task_failed.s()).task_id @@ -336,11 +337,18 @@ def augur_collection_monitor(): enabled_phases.append(primary_repo_collect_phase) #task success is scheduled no matter what the config says. 
- enabled_phases.append(core_task_success) + def core_task_success_phase(repo_git): + return core_task_success.si(repo_git) + + enabled_phases.append(core_task_success_phase) if secondary_repo_collect_phase.__name__ in enabled_phase_names: enabled_phases.append(secondary_repo_collect_phase) - enabled_phases.append(secondary_task_success) + + def secondary_task_success_phase(repo_git): + return secondary_task_success.si(repo_git) + + enabled_phases.append(secondary_task_success_phase) active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.core_status == CollectionState.COLLECTING.value).all()) @@ -354,10 +362,16 @@ def augur_collection_monitor(): repo_status_list = session.query(CollectionStatus).filter(and_(not_erroed, not_collecting, or_(never_collected, old_collection))).limit(limit).all() - repo_git_identifiers = [repo.repo_git for repo in repo_status_list] + repo_ids = [repo.repo_id for repo in repo_status_list] + + repo_git_result = session.query(Repo).filter(Repo.repo_id.in_(tuple(repo_ids))).all() + + repo_git_identifiers = [repo.repo_git for repo in repo_git_result] logger.info(f"Starting collection on {len(repo_ids)} repos") + logger.info(f"Collection starting for: {tuple(repo_git_identifiers)}") + augur_collection = AugurTaskRoutine(session,repos=repo_git_identifiers,collection_phases=enabled_phases) #Start data collection and update the collectionStatus with the task_ids @@ -370,7 +384,7 @@ def augur_collection_monitor(): repoStatus.core_task_id = task_id repoStatus.secondary_task_id = task_id repoStatus.core_status = CollectionState.COLLECTING.value - self.session.commit() + session.commit() From 58c089be12a80ed5970d42bb02d3550562912918 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Tue, 7 Feb 2023 14:49:41 -0600 Subject: [PATCH 102/134] Syntax error from git Signed-off-by: Isaac Milarsky --- augur/tasks/git/facade_tasks.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 86bc6a3f75..d17617c93c 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -598,17 +598,17 @@ def generate_non_repo_domain_facade_tasks(logger): facade_sequence = [] - if nuke_stored_affiliations and firstRun: + if nuke_stored_affiliations: facade_sequence.append(nuke_affiliations_facade_task.si().on_error(facade_error_handler.s()))#nuke_affiliations(session.cfg) #session.logger.info(session.cfg) - if not limited_run or (limited_run and fix_affiliations) and firstRun: + if not limited_run or (limited_run and fix_affiliations): facade_sequence.append(fill_empty_affiliations_facade_task.si().on_error(facade_error_handler.s()))#fill_empty_affiliations(session) - if force_invalidate_caches and firstRun: + if force_invalidate_caches: facade_sequence.append(invalidate_caches_facade_task.si().on_error(facade_error_handler.s()))#invalidate_caches(session.cfg) - if not limited_run or (limited_run and rebuild_caches) and firstRun: + if not limited_run or (limited_run and rebuild_caches): facade_sequence.append(rebuild_unknown_affiliation_and_web_caches_facade_task.si().on_error(facade_error_handler.s()))#rebuild_unknown_affiliation_and_web_caches(session.cfg) return facade_sequence \ No newline at end of file From eb21f53941f35d675565f4e800a1b19eb5c3d386 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Tue, 7 Feb 2023 16:29:51 -0600 Subject: [PATCH 103/134] syntax Signed-off-by: Isaac Milarsky --- augur/tasks/start_tasks.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 886988d481..ea4d013791 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -80,7 +80,7 @@ def core_task_success(repo_git): collection_status.core_task_id = None #TODO: remove when secondary tasks are changed to start elsewhere. - collection_status.secondary_status = CollectionState.SUCCESS.value + collection_status.secondary_status = CollectionState.COLLECTING.value session.commit() From 80c6a1d92d3e9366e492fad638bf27ef9a39344c Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Tue, 7 Feb 2023 16:33:16 -0600 Subject: [PATCH 104/134] Add worker for secondary tasks Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/cli/backend.py | 23 +++++++++++++++-------- augur/tasks/init/celery_app.py | 5 ++++- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 81bf30d202..6144cdc0e7 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -94,7 +94,8 @@ def start(disable_collection, development, port): logger.info(f'Augur is running at: http://127.0.0.1:{port}') scheduling_worker_process = None - worker_1_process = None + core_worker_process = None + secondary_worker_process = None celery_beat_process = None if not disable_collection: @@ -103,10 +104,12 @@ def start(disable_collection, development, port): os.remove("celerybeat-schedule.db") scheduling_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=1 -n scheduling:{uuid.uuid4().hex}@%h -Q scheduling" - worker_1 = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=20 -n {uuid.uuid4().hex}@%h" + core_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=14 -n core:{uuid.uuid4().hex}@%h" + secondary_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=5 -n secondary:{uuid.uuid4().hex}@%h -Q secondary" scheduling_worker_process = subprocess.Popen(scheduling_worker.split(" ")) - worker_1_process = subprocess.Popen(worker_1.split(" ")) + core_worker_process = subprocess.Popen(core_worker.split(" ")) + secondary_worker_process = subprocess.Popen(secondary_worker.split(" ")) time.sleep(5) @@ -135,21 +138,25 @@ def start(disable_collection, development, port): logger.info("Collection disabled") try: - worker_1_process.wait() + core_worker_process.wait() except KeyboardInterrupt: if server: logger.info("Shutting down server") server.terminate() - if worker_1_process: - logger.info("Shutting down celery process") - worker_1_process.terminate() + if core_worker_process: + logger.info("Shutting down celery process: core") + core_worker_process.terminate() if scheduling_worker_process: - logger.info("Shutting down celery process") + logger.info("Shutting down celery process: scheduling") scheduling_worker_process.terminate() + if secondary_worker_process: + logger.info("Shutting down celery process: secondary") + secondary_worker_process.terminate() + if celery_beat_process: logger.info("Shutting down celery beat process") celery_beat_process.terminate() diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index bb8a2b7758..0dee1a4851 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -59,7 +59,10 @@ # define the queues that tasks will be put 
in (by default tasks are put in celery queue) celery_app.conf.task_routes = { - 'augur.tasks.start_tasks.*': {'queue': 'scheduling'} + 'augur.tasks.start_tasks.*': {'queue': 'scheduling'}, + 'augur.tasks.github.pull_requests.commits_model.tasks.py': {'queue': 'secondary'}, + 'augur.tasks.github.pull_requests.files_model.tasks.py': {'queue': 'secondary'}, + 'augur.tasks.git.dependency_tasks.tasks.py': {'queue': 'secondary'} } #Setting to be able to see more detailed states of running tasks From cd0e561ae5d37b4f04e9bae75f33811e12fc6066 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 9 Feb 2023 17:17:03 -0600 Subject: [PATCH 105/134] Fix task routing Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/cli/backend.py | 2 +- augur/tasks/init/celery_app.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py index 6144cdc0e7..be2582320e 100644 --- a/augur/application/cli/backend.py +++ b/augur/application/cli/backend.py @@ -138,7 +138,7 @@ def start(disable_collection, development, port): logger.info("Collection disabled") try: - core_worker_process.wait() + server.wait() except KeyboardInterrupt: if server: diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 0dee1a4851..a2ac88f347 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -60,9 +60,9 @@ # define the queues that tasks will be put in (by default tasks are put in celery queue) celery_app.conf.task_routes = { 'augur.tasks.start_tasks.*': {'queue': 'scheduling'}, - 'augur.tasks.github.pull_requests.commits_model.tasks.py': {'queue': 'secondary'}, - 'augur.tasks.github.pull_requests.files_model.tasks.py': {'queue': 'secondary'}, - 'augur.tasks.git.dependency_tasks.tasks.py': {'queue': 'secondary'} + 'augur.tasks.github.pull_requests.commits_model.tasks.*': {'queue': 'secondary'}, + 'augur.tasks.github.pull_requests.files_model.tasks.*': {'queue': 'secondary'}, + 'augur.tasks.git.dependency_tasks.tasks.*': {'queue': 'secondary'} } #Setting to be able to see more detailed states of running tasks From c426225ebed6a99af1f7c7a2aaa67a1a94f22846 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Thu, 9 Feb 2023 17:34:48 -0600 Subject: [PATCH 106/134] Add config update to schema script Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- .../versions/5_add_collection_status_table.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/augur/application/schema/alembic/versions/5_add_collection_status_table.py b/augur/application/schema/alembic/versions/5_add_collection_status_table.py index a138a0e3e2..b233049368 100644 --- a/augur/application/schema/alembic/versions/5_add_collection_status_table.py +++ b/augur/application/schema/alembic/versions/5_add_collection_status_table.py @@ -49,6 +49,19 @@ def upgrade(): AND setting_name = 'collection_interval'; """)) + # if the database has the old repo_collect phase then add delete it and add these rows otherwise just let the rows be added in the config during making install + result = conn.execute(text("""SELECT * FROM augur_operations.config WHERE section_name='Task_Routine' and setting_name='repo_collect_phase';""")).fetchall() + if result: + print(result[0]) + print(dict(result[0])) + value = dict(result[0])["value"] + + conn.execute(text(f"""DELETE FROM 
augur_operations.config where section_name='Task_Routine' and setting_name='repo_collect_phase'; + INSERT INTO "augur_operations"."config" ("section_name", "setting_name", "value", "type") VALUES ('Task_Routine', 'secondary_repo_collect_phase', '{value}', 'int'); + INSERT INTO "augur_operations"."config" ("section_name", "setting_name", "value", "type") VALUES ('Task_Routine', 'primary_repo_collect_phase', '{value}', 'int'); + """)) + + # ### end Alembic commands ### From e8d72af5f14e7a481b55fd50474db2e0e01cb858 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 9 Feb 2023 17:51:54 -0600 Subject: [PATCH 107/134] Remove unneccesary wait Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/__init__.py | 8 +------- augur/tasks/start_tasks.py | 2 +- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 5629644f98..0366e80b31 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -5,7 +5,6 @@ from augur.tasks.init.celery_app import celery_app as celery import logging -@celery.task def machine_learning_phase(): from augur.tasks.data_analysis.clustering_worker.tasks import clustering_model from augur.tasks.data_analysis.contributor_breadth_worker.contributor_breadth_worker import contributor_breadth_model @@ -41,9 +40,4 @@ def machine_learning_phase(): ml_tasks.extend(pull_request_analysis_tasks) ml_tasks.extend(clustering_tasks) - task_chain = chain(*ml_tasks) - - result = task_chain.apply_async() - with allow_join_result(): - return result.get() - #return task_chain \ No newline at end of file + return ml_tasks \ No newline at end of file diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index ea4d013791..5132c7c6be 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -292,7 +292,7 @@ def non_repo_domain_tasks(): enabled_tasks.extend(generate_non_repo_domain_facade_tasks(logger)) if machine_learning_phase.__name__ in enabled_phase_names: - enabled_tasks.append(machine_learning_phase.si()) + enabled_tasks.extend(machine_learning_phase()) tasks = chain( *enabled_tasks, From 6fcbd793d920c00b8d10e7536af310a8b2f5bcaa Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 9 Feb 2023 18:12:19 -0600 Subject: [PATCH 108/134] better ml task scheduling Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/__init__.py | 26 ++++--------------- .../data_analysis/clustering_worker/tasks.py | 14 ++++++++-- .../data_analysis/discourse_analysis/tasks.py | 14 ++++++++-- .../data_analysis/insight_worker/tasks.py | 13 ++++++++-- .../data_analysis/message_insights/tasks.py | 15 +++++++++-- .../pull_request_analysis_worker/tasks.py | 18 ++++++++++--- 6 files changed, 68 insertions(+), 32 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 0366e80b31..44270e8afd 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -17,27 +17,11 @@ def machine_learning_phase(): logger = logging.getLogger(machine_learning_phase.__name__) - with DatabaseSession(logger, engine) as session: - query = session.query(Repo) - repos = execute_session_query(query, 'all') - ml_tasks = [] - clustering_tasks = [] - discourse_tasks = [] - insight_tasks = [] - message_insights_tasks = [] - pull_request_analysis_tasks = [] - for repo in repos: - clustering_tasks.append(clustering_model.si(repo.repo_git)) - 
discourse_tasks.append(discourse_analysis_model.si(repo.repo_git)) - insight_tasks.append(insight_model.si(repo.repo_git)) - message_insights_tasks.append(message_insight_model.si(repo.repo_git)) - pull_request_analysis_tasks.append(pull_request_analysis_model.si(repo.repo_git)) - - ml_tasks.extend(insight_tasks) - ml_tasks.extend(discourse_tasks) - ml_tasks.extend(message_insights_tasks) - ml_tasks.extend(pull_request_analysis_tasks) - ml_tasks.extend(clustering_tasks) + ml_tasks.append(clutering_task.si()) + ml_tasks.append(discourse_analysis_task.si()) + ml_tasks.append(insight_task.si()) + ml_tasks.append(message_insight_task.si()) + ml_tasks.append(pull_request_analysis_task.si()) return ml_tasks \ No newline at end of file diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index 91f01912ef..dada123680 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -32,11 +32,21 @@ @celery.task -def clustering_model(repo_git: str) -> None: +def clutering_task(): + logger = logging.getLogger(clustering_model.__name__) from augur.tasks.init.celery_app import engine - logger = logging.getLogger(clustering_model.__name__) + with DatabaseSession(logger, engine) as session: + query = session.query(Repo) + repos = execute_session_query(query, 'all') + + + for repo in repos: + clustering_model(repo.repo_git, logger, engine) + + +def clustering_model(repo_git: str,logger,engine) -> None: logger.info(f"Starting clustering analysis for {repo_git}") diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index c14d9146da..ac82d3987b 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -32,11 +32,21 @@ DISCOURSE_ANALYSIS_DIR = "augur/tasks/data_analysis/discourse_analysis/" @celery.task -def discourse_analysis_model(repo_git: str) -> None: +def discourse_analysis_task(): + logger = logging.getLogger(discourse_analysis_task.__name__) from augur.tasks.init.celery_app import engine - logger = logging.getLogger(discourse_analysis_model.__name__) + with DatabaseSession(logger, engine) as session: + query = session.query(Repo) + repos = execute_session_query(query, 'all') + + + for repo in repos: + discourse_analysis_model(repo.repo_git, logger, engine) + + +def discourse_analysis_model(repo_git: str,logger,engine) -> None: tool_source = 'Discourse Worker' tool_version = '0.1.0' diff --git a/augur/tasks/data_analysis/insight_worker/tasks.py b/augur/tasks/data_analysis/insight_worker/tasks.py index 334f67510d..71470a9cb3 100644 --- a/augur/tasks/data_analysis/insight_worker/tasks.py +++ b/augur/tasks/data_analysis/insight_worker/tasks.py @@ -24,12 +24,21 @@ @celery.task -def insight_model(repo_git: str) -> None: +def insight_task(): + logger = logging.getLogger(insight_task.__name__) from augur.tasks.init.celery_app import engine - logger = logging.getLogger(insight_model.__name__) + with DatabaseSession(logger, engine) as session: + query = session.query(Repo) + repos = execute_session_query(query, 'all') + + + for repo in repos: + insight_model(repo.repo_git, logger, engine) + +def insight_model(repo_git: str,logger,engine) -> None: refresh = True send_insights = True diff --git a/augur/tasks/data_analysis/message_insights/tasks.py b/augur/tasks/data_analysis/message_insights/tasks.py index 5377be36db..2339ee0e92 100644 --- 
a/augur/tasks/data_analysis/message_insights/tasks.py +++ b/augur/tasks/data_analysis/message_insights/tasks.py @@ -23,11 +23,22 @@ ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) @celery.task -def message_insight_model(repo_git: str) -> None: +def message_insight_task(): + logger = logging.getLogger(message_insight_task.__name__) from augur.tasks.init.celery_app import engine - logger = logging.getLogger(message_insight_model.__name__) + with DatabaseSession(logger, engine) as session: + query = session.query(Repo) + repos = execute_session_query(query, 'all') + + + for repo in repos: + message_insight_model(repo.repo_git, logger, engine) + + + +def message_insight_model(repo_git: str,logger,engine) -> None: full_train = True begin_date = '' diff --git a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py index 76b0514964..2fe970311d 100644 --- a/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py +++ b/augur/tasks/data_analysis/pull_request_analysis_worker/tasks.py @@ -22,11 +22,23 @@ ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) @celery.task -def pull_request_analysis_model(repo_git: str) -> None: +def pull_request_analysis_task(): - from augur.tasks.init.celery_app import engine + logger = logging.getLogger(pull_request_analysis_task.__name__) + from augur.tasks.init.celery_app import engine + + with DatabaseSession(logger, engine) as session: + query = session.query(Repo) + repos = execute_session_query(query, 'all') + + + for repo in repos: + pull_request_analysis_model(repo.repo_git, logger, engine) + + + +def pull_request_analysis_model(repo_git: str,logger,engine) -> None: - logger = logging.getLogger(pull_request_analysis_model.__name__) tool_source = 'Pull Request Analysis Worker' tool_version = '0.0.0' From 7d9dabdd376632a7498a4a66577b6eb9ed0b1b79 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Thu, 9 Feb 2023 18:16:31 -0600 Subject: [PATCH 109/134] fix typo and imports Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/__init__.py | 13 +++++++------ .../tasks/data_analysis/clustering_worker/tasks.py | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 44270e8afd..10049bc6cb 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -6,22 +6,23 @@ import logging def machine_learning_phase(): - from augur.tasks.data_analysis.clustering_worker.tasks import clustering_model + from augur.tasks.data_analysis.clustering_worker.tasks import clustering_task from augur.tasks.data_analysis.contributor_breadth_worker.contributor_breadth_worker import contributor_breadth_model - from augur.tasks.data_analysis.discourse_analysis.tasks import discourse_analysis_model - from augur.tasks.data_analysis.insight_worker.tasks import insight_model - from augur.tasks.data_analysis.message_insights.tasks import message_insight_model - from augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_model + from augur.tasks.data_analysis.discourse_analysis.tasks import discourse_analysis_task + from augur.tasks.data_analysis.insight_worker.tasks import insight_task + from augur.tasks.data_analysis.message_insights.tasks import message_insight_task + from 
augur.tasks.data_analysis.pull_request_analysis_worker.tasks import pull_request_analysis_task from augur.tasks.init.celery_app import engine logger = logging.getLogger(machine_learning_phase.__name__) ml_tasks = [] - ml_tasks.append(clutering_task.si()) + ml_tasks.append(clustering_task.si()) ml_tasks.append(discourse_analysis_task.si()) ml_tasks.append(insight_task.si()) ml_tasks.append(message_insight_task.si()) ml_tasks.append(pull_request_analysis_task.si()) + ml_tasks.append(contributor_breadth_worker.si()) return ml_tasks \ No newline at end of file diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index dada123680..f8c2b8b02f 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -32,7 +32,7 @@ @celery.task -def clutering_task(): +def clustering_task(): logger = logging.getLogger(clustering_model.__name__) from augur.tasks.init.celery_app import engine From 99232ac76ae3b3d9da6d01d422e35026ea8936a6 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 10 Feb 2023 10:10:17 -0600 Subject: [PATCH 110/134] sql statement exec and typo Signed-off-by: Isaac Milarsky --- augur/application/db/session.py | 4 ++-- augur/tasks/data_analysis/__init__.py | 2 +- .../util/facade_worker/facade_worker/facade05repofetch.py | 6 +----- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/augur/application/db/session.py b/augur/application/db/session.py index 2bf037108d..bb5ffe65e6 100644 --- a/augur/application/db/session.py +++ b/augur/application/db/session.py @@ -88,14 +88,14 @@ def execute_sql(self, sql_text): with EngineConnection(self.engine) as connection: return_data = connection.execute(sql_text) - return return_data + return return_data def fetchall_data_from_sql_text(self,sql_text): with EngineConnection(self.engine) as connection: result = connection.execute(sql_text) .fetchall() - return [dict(zip(row.keys(), row)) for row in result] + return [dict(zip(row.keys(), row)) for row in result] def insert_data(self, data: Union[List[dict], dict], table, natural_keys: List[str], return_columns: Optional[List[str]] = None, string_fields: Optional[List[str]] = None, on_conflict_update:bool = True) -> Optional[List[dict]]: diff --git a/augur/tasks/data_analysis/__init__.py b/augur/tasks/data_analysis/__init__.py index 10049bc6cb..1ecbec05fb 100644 --- a/augur/tasks/data_analysis/__init__.py +++ b/augur/tasks/data_analysis/__init__.py @@ -23,6 +23,6 @@ def machine_learning_phase(): ml_tasks.append(insight_task.si()) ml_tasks.append(message_insight_task.si()) ml_tasks.append(pull_request_analysis_task.si()) - ml_tasks.append(contributor_breadth_worker.si()) + ml_tasks.append(contributor_breadth_model.si()) return ml_tasks \ No newline at end of file diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index b6667b2473..98a355d1db 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -130,10 +130,6 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): # Create the prerequisite directories return_code = subprocess.Popen([f"mkdir -p {repo_path}"],shell=True).wait() -# session.log_activity('Info','Return code value when making directors from facade05, line 120: {:d}'.format(return_code)) - - - # Make sure it's ok to proceed if 
return_code != 0: print("COULD NOT CREATE REPO DIRECTORY") @@ -188,7 +184,7 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): session.log_activity('Error',f"Could not clone {git}") - session.log_activity('Info', f"Fetching new repos (complete)") + session.log_activity('Info', f"Fetching new repo (complete)") def check_for_repo_updates(session,repo_git_identifiers): From b58867b869b1bf81bd0546a09cdc8122a3a041ed Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 10 Feb 2023 10:19:50 -0600 Subject: [PATCH 111/134] Fix success task phases Signed-off-by: Andrew Brain --- augur/application/cli/collection.py | 88 +++++++++++++++++++++++++++++ augur/tasks/start_tasks.py | 8 +-- 2 files changed, 92 insertions(+), 4 deletions(-) create mode 100644 augur/application/cli/collection.py diff --git a/augur/application/cli/collection.py b/augur/application/cli/collection.py new file mode 100644 index 0000000000..ad7533eebc --- /dev/null +++ b/augur/application/cli/collection.py @@ -0,0 +1,88 @@ +#SPDX-License-Identifier: MIT +""" +Augur library commands for controlling the backend components +""" +import resource +import os +import time +import subprocess +import click +import logging +import psutil +import signal +import sys +from redis.exceptions import ConnectionError as RedisConnectionError +from celery import chain, signature, group +import uuid +import traceback +from sqlalchemy import update + + +from augur import instance_id +from augur.tasks.start_tasks import augur_collection_monitor, CollectionState +from augur.tasks.init.redis_connection import redis_connection +from augur.application.db.models import Repo, CollectionStatus +from augur.application.db.session import DatabaseSession +from augur.application.db.util import execute_session_query + +from augur.application.logs import AugurLogger +from augur.application.config import AugurConfig +from augur.application.cli import test_connection, test_db_connection +from augur.tasks.github.util.github_random_key_auth import GithubRandomKeyAuth +from augur.tasks.github.util.github_paginator import hit_api + +import sqlalchemy as s + + +logger = AugurLogger("augur", reset_logfiles=True).get_logger() + +def get_page_count() + + +def check_collection(owner, repo, key_manager, session): + + # prs + pr_url = f"https://api.github.com/repos/{owner}/{repo}/pulls?state=all&direction=desc" + prs = hit_api(key_manager, pr_url, logger) + + + # issues + # issue_url = "" + # issues = hit_api(key_manager, issue_url, logger) + + # # messages + # message_url = "" + # messages = hit_api(key_manager, message_url, logger) + + # # events + # event_url = "" + # events = hit_api(key_manager, event_url, logger) + + return True, True, True, True + + +@click.group('collection', short_help='Commands for controlling the backend API server & data collection workers') +def cli(): + pass + +@cli.command("status") +@click.option("--failed", is_flag=True, default=False, help="Only shows repos that failed") +@test_connection +@test_db_connection +def status(failed): + + with DatabaseSession(logger) as session: + + key_manager = GithubRandomKeyAuth(session) + + query = session.query(Repo) + repos = execute_session_query(query, 'all') + + for repo in repos: + + repo_git = repo.repo_git + + + + + \ No newline at end of file diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 5132c7c6be..096a876809 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -337,18 +337,18 @@ def augur_collection_monitor(): 
enabled_phases.append(primary_repo_collect_phase) #task success is scheduled no matter what the config says. - def core_task_success_phase(repo_git): + def core_task_success_gen(repo_git): return core_task_success.si(repo_git) - enabled_phases.append(core_task_success_phase) + enabled_phases.append(core_task_success_gen) if secondary_repo_collect_phase.__name__ in enabled_phase_names: enabled_phases.append(secondary_repo_collect_phase) - def secondary_task_success_phase(repo_git): + def secondary_task_success_gen(repo_git): return secondary_task_success.si(repo_git) - enabled_phases.append(secondary_task_success_phase) + enabled_phases.append(secondary_task_success_gen) active_repo_count = len(session.query(CollectionStatus).filter(CollectionStatus.core_status == CollectionState.COLLECTING.value).all()) From f1230afaa060be4551454629f1118c1894447075 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 10 Feb 2023 11:22:36 -0600 Subject: [PATCH 112/134] Raise exception when repo_path or name is None Signed-off-by: Andrew Brain --- .../facade_worker/facade05repofetch.py | 171 +++++++++--------- 1 file changed, 90 insertions(+), 81 deletions(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index 98a355d1db..932c0a73d2 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -130,6 +130,10 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): # Create the prerequisite directories return_code = subprocess.Popen([f"mkdir -p {repo_path}"],shell=True).wait() +# session.log_activity('Info','Return code value when making directors from facade05, line 120: {:d}'.format(return_code)) + + + # Make sure it's ok to proceed if return_code != 0: print("COULD NOT CREATE REPO DIRECTORY") @@ -184,7 +188,7 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): session.log_activity('Error',f"Could not clone {git}") - session.log_activity('Info', f"Fetching new repo (complete)") + session.log_activity('Info', f"Fetching new repos (complete)") def check_for_repo_updates(session,repo_git_identifiers): @@ -297,31 +301,36 @@ def git_repo_updates(session,repo_git): Repo.repo_git == repo_git,Repo.repo_status == 'Update') result = execute_session_query(query, 'all') - existing_repos = convert_orm_list_to_dict_list(result)#session.fetchall_data_from_sql_text(query)#list(cfg.cursor) + try: + row = convert_orm_list_to_dict_list(result)[0]#session.fetchall_data_from_sql_text(query)#list(cfg.cursor) + except IndexError: + raise Exception(f"Repo git: {repo_git} does not exist or the status is not 'Update'") + + if not row["repo_path"] or not row["repo_name"]: + raise Exception(f"The repo path or repo name is NULL for repo_id: {row['repo_id']}") + + session.log_activity('Verbose',f"Attempting to update {row['repo_git']}")#['git']) + update_repo_log(session, row['repo_id'],'Updating')#['id'],'Updating') - for row in existing_repos: - session.log_activity('Verbose',f"Attempting to update {row['repo_git']}")#['git']) - update_repo_log(session, row['repo_id'],'Updating')#['id'],'Updating') + attempt = 0 - attempt = 0 + # Try two times. If it fails the first time, reset and clean the git repo, + # as somebody may have done a rebase. No work is being done in the local + # repo, so there shouldn't be legit local changes to worry about. - # Try two times. 
If it fails the first time, reset and clean the git repo, - # as somebody may have done a rebase. No work is being done in the local - # repo, so there shouldn't be legit local changes to worry about. + #default_branch = '' - #default_branch = '' + while attempt < 2: - while attempt < 2: + try: - try: + firstpull = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") - firstpull = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") + return_code_remote = subprocess.Popen([firstpull],shell=True).wait() - return_code_remote = subprocess.Popen([firstpull],shell=True).wait() + session.log_activity('Verbose', 'Got to here. 1.') - session.log_activity('Verbose', 'Got to here. 1.') - - if return_code_remote == 0: + if return_code_remote == 0: # logremotedefault = ("git -C %s%s/%s%s remote set-head origin -a" # % (session.repo_base_directory,row[1],row[4],row[3])) @@ -330,45 +339,45 @@ def git_repo_updates(session,repo_git): # session.log_activity('Verbose', f'remote default is {logremotedefault}.') - getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} remote show origin | sed -n '/HEAD branch/s/.*: //p'") + getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} remote show origin | sed -n '/HEAD branch/s/.*: //p'") - return_code_remote = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE, shell=True).wait() + return_code_remote = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE, shell=True).wait() - remotedefault = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).communicate()[0] + remotedefault = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).communicate()[0] - remotedefault = remotedefault.decode() + remotedefault = remotedefault.decode() - session.log_activity('Verbose', f'remote default getting checked out is: {remotedefault}.') + session.log_activity('Verbose', f'remote default getting checked out is: {remotedefault}.') - getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} checkout {remotedefault}") + getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} checkout {remotedefault}") - session.log_activity('Verbose', f"get remote default command is: \n \n {getremotedefault} \n \n ") + session.log_activity('Verbose', f"get remote default command is: \n \n {getremotedefault} \n \n ") - return_code_remote_default_again = subprocess.Popen([getremotedefault],shell=True).wait() + return_code_remote_default_again = subprocess.Popen([getremotedefault],shell=True).wait() - if return_code_remote_default_again == 0: - session.log_activity('Verbose', "local checkout worked.") - cmd = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") + if return_code_remote_default_again == 0: + session.log_activity('Verbose', "local checkout worked.") + cmd = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") - return_code = subprocess.Popen([cmd],shell=True).wait() + return_code = subprocess.Popen([cmd],shell=True).wait() - except Exception as e: - session.log_activity('Verbose', f'Error code on branch change is {e}.') - pass + except Exception as e: + session.log_activity('Verbose', f'Error 
code on branch change is {e}.') + pass - finally: + finally: - cmd = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") + cmd = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") - return_code = subprocess.Popen([cmd],shell=True).wait() + return_code = subprocess.Popen([cmd],shell=True).wait() - # If the attempt succeeded, then don't try any further fixes. If - # the attempt to fix things failed, give up and try next time. - if return_code == 0 or attempt == 2: - break + # If the attempt succeeded, then don't try any further fixes. If + # the attempt to fix things failed, give up and try next time. + if return_code == 0 or attempt == 2: + break - elif attempt == 0: - session.log_activity('Verbose',f"git pull failed, attempting reset and clean for {row['repo_git']}") + elif attempt == 0: + session.log_activity('Verbose',f"git pull failed, attempting reset and clean for {row['repo_git']}") # remotedefault = 'main' @@ -379,76 +388,76 @@ def git_repo_updates(session,repo_git): # session.log_activity('Verbose', f'remote default is {logremotedefault}.') - getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} remote show origin | sed -n '/HEAD branch/s/.*: //p'") + getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} remote show origin | sed -n '/HEAD branch/s/.*: //p'") - return_code_remote = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).wait() + return_code_remote = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).wait() - remotedefault = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).communicate()[0] + remotedefault = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).communicate()[0] - remotedefault = remotedefault.decode() + remotedefault = remotedefault.decode() - try: + try: - getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} checkout {remotedefault}") + getremotedefault = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} checkout {remotedefault}") - return_code_remote_default = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).wait() + return_code_remote_default = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).wait() - return_message_getremotedefault = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).communicate()[0] + return_message_getremotedefault = subprocess.Popen([getremotedefault],stdout=subprocess.PIPE,shell=True).communicate()[0] - session.log_activity('Verbose', f'get remote default result: {return_message_getremotedefault}') + session.log_activity('Verbose', f'get remote default result: {return_message_getremotedefault}') - getcurrentbranch = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} branch") + getcurrentbranch = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} branch") - return_code_local = subprocess.Popen([getcurrentbranch],stdout=subprocess.PIPE,shell=True).wait() + return_code_local = subprocess.Popen([getcurrentbranch],stdout=subprocess.PIPE,shell=True).wait() - localdefault = subprocess.Popen([getcurrentbranch],stdout=subprocess.PIPE,shell=True).communicate()[0] 
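
The shell pipeline repeated throughout this hunk, `git remote show origin | sed -n '/HEAD branch/s/.*: //p'`, extracts the remote's default branch so the later checkout and pull target the right branch. A rough Python equivalent for readers who do not read sed, with an illustrative helper name that is not part of the patch:

```python
# Sketch only: discover origin's default branch, as the sed pipeline above does.
import subprocess

def get_remote_default_branch(repo_dir: str) -> str:
    out = subprocess.run(
        ["git", "-C", repo_dir, "remote", "show", "origin"],
        capture_output=True, text=True, check=True,
    ).stdout
    # `git remote show origin` prints a line such as "  HEAD branch: main".
    for line in out.splitlines():
        if "HEAD branch:" in line:
            return line.split(":", 1)[1].strip()
    raise RuntimeError("could not determine the remote default branch")
```
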
+ localdefault = subprocess.Popen([getcurrentbranch],stdout=subprocess.PIPE,shell=True).communicate()[0] - localdefault = localdefault.decode() + localdefault = localdefault.decode() - session.log_activity('Verbose', f'remote default is: {remotedefault}, and localdefault is {localdefault}.') + session.log_activity('Verbose', f'remote default is: {remotedefault}, and localdefault is {localdefault}.') - cmd_checkout_default = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} checkout {remotedefault}") + cmd_checkout_default = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} checkout {remotedefault}") - cmd_checkout_default_wait = subprocess.Popen([cmd_checkout_default],shell=True).wait() + cmd_checkout_default_wait = subprocess.Popen([cmd_checkout_default],shell=True).wait() - cmdpull2 = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") + cmdpull2 = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") - cmd_reset = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} reset --hard origin") + cmd_reset = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} reset --hard origin") - cmd_reset_wait = subprocess.Popen([cmd_reset],shell=True).wait() + cmd_reset_wait = subprocess.Popen([cmd_reset],shell=True).wait() - cmd_clean = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} clean -df") + cmd_clean = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} clean -df") - return_code_clean = subprocess.Popen([cmd_clean],shell=True).wait() + return_code_clean = subprocess.Popen([cmd_clean],shell=True).wait() - except Exception as e: + except Exception as e: - session.log_activity('Verbose', f'Second pass failed: {e}.') - pass + session.log_activity('Verbose', f'Second pass failed: {e}.') + pass - cmdpull2 = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") - - print(cmdpull2) - return_code = subprocess.Popen([cmdpull2],shell=True).wait() + cmdpull2 = (f"git -C {session.repo_base_directory}{row['repo_group_id']}/{row['repo_path']}{row['repo_name']} pull") + + print(cmdpull2) + return_code = subprocess.Popen([cmdpull2],shell=True).wait() - attempt += 1 - - #default_branch = '' + attempt += 1 - if return_code == 0: + #default_branch = '' - set_to_analyze = s.sql.text("""UPDATE repo SET repo_status='Analyze' WHERE repo_id=:repo_id and repo_status != 'Empty' - """).bindparams(repo_id=row['repo_id']) - session.execute_sql(set_to_analyze) + if return_code == 0: - update_repo_log(session, row['repo_id'],'Up-to-date') - session.log_activity('Verbose',f"Updated {row['repo_git']}") + set_to_analyze = s.sql.text("""UPDATE repo SET repo_status='Analyze' WHERE repo_id=:repo_id and repo_status != 'Empty' + """).bindparams(repo_id=row['repo_id']) + session.execute_sql(set_to_analyze) - else: + update_repo_log(session, row['repo_id'],'Up-to-date') + session.log_activity('Verbose',f"Updated {row['repo_git']}") - update_repo_log(session, row['repo_id'],f"Failed ({return_code})") - session.log_activity('Error',f"Could not update {row['repo_git']}" ) + else: + + update_repo_log(session, row['repo_id'],f"Failed ({return_code})") + session.log_activity('Error',f"Could not update 
{row['repo_git']}" ) session.log_activity('Info','Updating existing repos (complete)') From be38b40f982c91c0488bc8e19e5ee5dd9da0a333 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 10 Feb 2023 13:09:09 -0600 Subject: [PATCH 113/134] Fix query that was not filtering by repo Signed-off-by: Andrew Brain --- .../git/util/facade_worker/facade_worker/facade05repofetch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index 932c0a73d2..393fd7d6c8 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -164,7 +164,7 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): # circumstances caches are rebuilt only once per waiting period. update_project_status = s.sql.text("""UPDATE repo SET repo_status='Update' WHERE - repo_group_id=:repo_group_id AND repo_status != 'Empty'""").bindparams(repo_group_id=row['repo_group_id']) + repo_group_id=:repo_group_id AND repo_status != 'Empty' AND repo_id=:repo_id""").bindparams(repo_group_id=row['repo_group_id'], repo_id=row["repo_id"]) session.execute_sql(update_project_status) # Since we just cloned the new repo, set it straight to analyze. From f12488d44fbe5798fc59edc61a50c4279967c0a6 Mon Sep 17 00:00:00 2001 From: "Sean P. Goggins" Date: Fri, 10 Feb 2023 14:19:55 -0600 Subject: [PATCH 114/134] updates for debugging --- .../clustering_worker/kmeans_repo_messages | Bin 0 -> 33012 bytes .../data_analysis/clustering_worker/lda_model | Bin 0 -> 83683 bytes .../data_analysis/clustering_worker/vocabulary | Bin 0 -> 48192 bytes .../clustering_worker/vocabulary_count | Bin 0 -> 45614 bytes .../facade_worker/facade05repofetch.py | 8 +++++++- augur/tasks/start_tasks.py | 2 +- 6 files changed, 8 insertions(+), 2 deletions(-) create mode 100755 augur/tasks/data_analysis/clustering_worker/kmeans_repo_messages create mode 100755 augur/tasks/data_analysis/clustering_worker/lda_model create mode 100755 augur/tasks/data_analysis/clustering_worker/vocabulary create mode 100755 augur/tasks/data_analysis/clustering_worker/vocabulary_count diff --git a/augur/tasks/data_analysis/clustering_worker/kmeans_repo_messages b/augur/tasks/data_analysis/clustering_worker/kmeans_repo_messages new file mode 100755 index 0000000000000000000000000000000000000000..0ac6f4fbae1c48834ccc9d7b182f055090e6319b GIT binary patch literal 33012 zcmb5Vby$@_*EcF1f=CFcpeP|ock`pAI|ZaWH{IPOB_N`R3P=lLU;zeV5-JK}AfSSn z7$~S`pXYt|=DHrwcYW944}b3aUNf`Snz?7itVyNvj0g(;Fcas6Mq2m+NcwF5Qrk$;GPB!Sw2f%xA*@&B|H z6(!I(&=Y?LyT`iv6FVi)nu<|4P!XR*h6WMn_F#*`t9Lv);tm|dM`7XKo}s~EQIX!R zUj7l0?jfGu5d`}GbnPA$8A@Pqpe0g;xc+AxMq@??I^xf0?{JUM2yeoA<8=;9#NXlW zAzq=uuEge%-UKGI|1|Lo4U2P)B{1jyZwCRP9ub5MX8+;TJ;*mS+&|JUn84!4;INU{ z!~e?V>ggRqX1p$oz^+5#LEy+CY_cM73L8@xQ=7h}$|i9AFRfQ(T$nc_f!l$SIIfQp zVYBf%V~QNY7AwM5VPh%>3gS~W0*{%QnbBY3-+%rwUKdH=^~lp+m$Yu(9v4}mi1+>| zh}gF3emM{gke!#@Wec85*Atp<`@s13$oZ=?UT~dAv+|4gf|_zs{b}j__&P8~undgH zVfrU8&N9YCDeC>y}GaJi=wY1U)cF|`y+_6&A(p8MY z`F%%Ra!>4mivF!`HoJ0U+O*#rD(6R!y?AY5tr&W_Wvx8vBS7u5wdQh^3f_&%YDaQK z!)(ea;d%Z61ZhPHU0|((dzQ|nS;{cHo*Ze@{@{UA-)MUHYvu6ib;^C|N&*U9+pE5c zrGe2=hE;|>j4bbZ+f+OIBa!6atsFD)h08hUd@x!bF%f`^EKU2kCXMiHqjrz7XEb+LC8%hfhwtSDuBZ;>(=XLywGlcv*(aDH?Nu!<@Hw z(?BRr_eCmwlGVb&D=A)QDR<&(W$IrBD>iiZ*&WOgCE&v+kK#0$Owbv}e9m1>!0I9H zff;Uh1WP=Yt$S;afPOXuDh4mIyd-}98~M)e57XE8#<}wq*3VcC$@D{P1@~;6qH(AC 
zh;+;LcvNj)n0PAVf{foC779PC;T<|`&93T!eXNsdxg8}qRrB8S<^EK1`~Mz?^m_wC zeVf(BBiQ$ao=@966-UhmZfAvLklW>T3wQmRcf^IeIw8Nf*YxR}BOF9~Gzka2rvBuF zNJHB5OLf*+O>#Sz?bL4%>&JSO)iV+hKQ2rc=0lI+XX-S1l9pPTgiH@BlBS~NXjRFx9)4M-;U_#A=A$3X$ZJ)XjWbCfsKDjz)ke4z}v`DlFDVlc(yi>e>Hv?3%lz!tf6a49?pA(v{`Q>_F* zdCd+|W)ARq*ImJ?y86g?xgi&feQOU~Eml?X)Qk6MYp&emJ<3 z=}CH4Eu^B^`@jHvhtHqXVt2=@djciSOj>Ab<)$JCjC}bh2mKG;JX+Kyz^1h=e>XHiC$_iYRGuo> zzIIUvOA^1EJTnd!eiN*Ju27_~D5`P4Yc zbOup2Tnp^)8>9+q8tL>2m5-h#tiVYf$IAD z>0)gIC=MNTn8jaWyCc9Gd$zc-RB6B%vImwtg=xGP;}3 z#e0y)W!RLzlqTQ?N(ZZjDGOJ;c}cGp8(3|HDPp zvW;l>omfIG1xz7ky8e)=Xgp>UMOCk+89}fAoS#>O9G*NVb3d!?hd=AtQY*hYfocE9 zEnAOLY;SH1Ixv9XNj%JVY}P~gHh3% zW5mBN2tmRY?wW}qFjn@-vKP@s?;{gU<;(tPQ|goTaJ0o?h98;rcYHB_rhW6)U+%E~ z`0Z(Jpf^O)ckaw3=)$jbJ;Ty>DQsS$_qDrXfr`PK*0o^-#N3hp{p?;O9#&rZ+j%=2 zf==}JSARLfSoc!6@Dm%X5)8fu6YcR8#`5s9mFb9oZ77`2MF5ZJ^|MQNeUP)^@8az7 zKN=5XKiv7XKLYqRd;NO=0rTyQugfiBaBxL-<2{}z6ozZaOXjcnXN_D|D>bzF;PX86 zBD;DGXZO?D8~6t!HQsupUONq&z3cPZRrcXhzg_Z49zAeu%2C+ZcK`yHv};d(U-NfJ z`!wl$AmQ^y)u7Gh{t$~yn9Hl(Ltby6WwiDT9!moW1Lr9|V* zv!>$VmbZ3UXQHs%?Qivk&R-gv@6d`@bnD~v9&Pc9JhV{%bH(&nL>jr@$rh8g2A3EZ z@_l|fUQEEdn{8o>1$JEL8c?=F^T`?DG#kQ zBwpF=I?R&>fu8DU$INVy>>sncoatE#ADrcf(u+l+)CFGsbC(4CR3Mln>3e9> z9V;Q+5v3OTcqib(MZGf&%sWr~wQ=wOd%6CQ$c#G{i%(drD^I~ZeQ4|<(O8VJU(a=` zO+sJe#{njjbljct{t+tghNRg7`?<(iJiw)ILlxft=!v8+()Ub4(sTNs^0kXk4*b*Z zzv;F`*xgoa z#(g$JwMmE89d0qf0?ll!D7ey-!im9VC`Y2 z@uZ8zmou$LFN{UvncQb*A=!95G+P!OA*hnuOI+PCCaRT!3YpVo_p(w@oS^((Dli83 z4D%1hg*uYyNc{dcmcCN7X~^6451rZ*S!XWIG`J<1oL0WAjyDPgA{%!5qiODDaJY*z z4wlv$(Pw2LTjL{j#>TaIuBg2CCM+Tz1@Ts*okh|3uA<=9cQ6(gOGV`^SJwRXtLtvL zcI5k_=hfA!sPom3?CfQ_v>_fATaQ=|coiQ1z7;7pI~kB`Rj*Mg&}ngum~-Ok9_F zKUY(pr{QH>Q{syPAAG7PI>I9ogFJcxwHA6GT#i!j9rzRrnxzXSCTGGBuCe8#1EV*# zx7@dIYKR3FufSb4GXmHy?QQwGVu<9D1f2{KencB=pD?NZcU^oMW#6_)jJJ+avS!%AejglAXx3A85S zsI7G0o7x;W##qOFa7jVPl;DNUo_^4s3GDe!@CDa<-VZ|UZa9D4+43WEIX)~*2TfPH z;K1h9=R8B9xOC-38Ow2ZXsSxSn=cQ5sB*nXc~Av<4DM~HaZdrY{sorEQg^h;U#{$k zTwAw?f^U3_C)!1+z_IP2MMQst;)=z(zv(b|v+_fY7>|sz4{&1VDR!jvlo6IN5;0y=*N|Uo@Br7Sny!3H#;K)OPv|n?H$nD6=p1=xc2>X zW&P}8Z!bsLvGw=&jECdehl_P$#rBYUOMQcfB_0<`XlLf~&^YHevO_YcA^fiH8% z+`^!A^L_ot+B9-JQ(r#z?N5sbN**46@^Ysox^uGs?%I(A!;@x161|St{e4LN%#&

OdNcgR^c*X30To{Dpa10MCL2EqOL(>Sgdrs zcP%AAzr-=xWOpr`?LVksT^wFc+Kx~C-Uw+UO=^eIH0*5hSXZ?R=3X?r6^MdcUufVXF9&ov{0L+@8I8aa-6LPf!r@p-RsT=L0V zKYE3LT%}t|PPXy*Gwu3>x-Jb{&zX0tkLAGs;iYUZ4g3F{&y8gHOoBHlPZeu|kJaoiQ21qbWiGueu&-2wgh!KgfH-^Ud&I&%?~f;`Vz8W zMA^tmPZ0|Np4ros2lY|7@_>V2Y766vo?Z3Umbi2I*M8%(CV0-0Zz|oHhGCX5ruBl+ zxH~V!xShuhk#jkeQQK_cS!)?p?w^Qm>eZzSZ82E(_jxw@K@}sx&PwYNY;m_gjxq2g z;UBrD68P$5biL3pQ?dNro)2c`)dmF5cxbCm7uj~WfvoGEIelHvSa)RVm@2a3B7Y`S(N08Ovz7I_}q;=1N=;$hJY zoSz+dE9L5iB-L|4=O3lPMrp)MzNH9K#a(-jKh*(~rHX_jy9Ie(YdM03uUwEP=e@YL zIB~Dvc#Qm>G~fS?J9+xJGs8omrb*kOpA9nKe*8NOivv$!u#kHyZ^#LXcWTqSoI5eX zRnki-kcJkS!I!(*hQO38k~k_o51uS48uOnw!T(2Oa$<`ewhahUhI6VSX+@2e&oqPV z*DdyaiyWNv!;Rb{k1j3yB6o7Af-l(v1MbrwtBCu%K=0SV6yJDkmahNJVH*x=`?QD? zvBID|e7oOmrzfWSvs5C}+;K5^-!IBEeJs##py(*|f%c>HDOM?e@G3V55@7b_ z8;8~d9kL!s-vbGw?mlVs$|c6#%hu`9?*d?Q`$ym@Y8!ZFK5t!jB?E5@$IcjRc0!D! z;@O$qJ_s(9bC3U?O71W3YsXyHjxdnshtw|#d+sg*hB5*Xv0FZ6RdwKhgY+^Auv~r{yyDpEpZUxOw076{3&aVqU6NSklC{A69=_5HKO5{^H*oz-TsY)?eTJ%nqu?_1 zyrxNNFWBtdq$!B`@E5eJ$`6FfkfpT0!Y;rBw)N#jUDC1WnVfeVr&2~F{qvvwyG22N z@-y=W!$256>O129)e3Aa!h5ruT+!fJT9EM93aUX%0oS*up@1!q5cwz`o61W0^#pxE zuxTyP}xQFM%UbzsA1+%w>DaTdj3?PopI z-ih|R{2~~k;xqCw4~_7~KIbYeeGwj3Z@D;L5{31vr+!If`l9oj*!J|rXylF42eSBv zqJg>6F8FpJn&L{=wdNFp|5(j;xla=J3!`UwR?ARn+c~B!0y&zZx_3vxKch?qe5=Q1HIbnWpRs{3qXVcS-cj z#*!Q?zc9ae>vsueXCzpEU*$kwdG5#T+(5(`+}nGFQ4LhVdK>7Je`&ZC$$YH3zN$eQ z=l_Im#>_L)PWuq5di+SWaW8tqAC8o#CdPx59C8nn#rm379Ir#Z|T9gTjah ztS;srC>WNGrqq`4xcWXfNO$3l#Zp}+URZf?z5mkL4Mx;LgG*@$O8OdWA8 z5_;OY=XGVmaMkcNL*|h%Oi8pG9duvw(@FF2-{{5G+xIxi2Sr)(1R6Fm*xWSJZY27b zbc}6#X^HWL|HPxuT8VzxAef=NGOdTL`KnbCS0nM{%A4FD=VNeXfbL4~E8@QQY&ZR0 zHBT%&JzH)+^sn8Mw+$YtUj6rb*?8>MDA@PIOLA;6XfzCW-VZi7y^n(7bqOP>U>}g; zf&b?FtqQ|V7VaG4Jo5g@GPXz4ACaMZ_I`*DmD`ONdrac*%VOS#A!6RvpRJ=y;c+qs7k_nWvvT8{3HSShj=Z=>b;I9w z|5`luKY9D@ICYfy$BXs|d8Gz zG*c}8c%`!4+Vo=>)V{y1_e{+v`&A@Ay;W(L7Psutp5Qxi)6EN~ln>ecr41+ZLz4G@ z{2od)zgU-T^hyOs{|f2;UP^-I%MXMGB@+aT39J`jbVIig+s+5WLAb!x@y=|6J!a^K zStRMI5~oGxc&AP@c@6=y|0#NOk9iaNaK>QUtM4K zlv@hgazt0CvI9_Mk+{rp$rF_g+igNJi03A*u7~py93g!&X#4A*hj9G;t=$SQ>%g)< zLYG zcG0ht>OuCteKjM01(sfBu$W9MNX{8VYpgi2&ej|sNFQ#9mwJ*r&q1A?Sas-xZLtw9fUl}rr@8C#jgV{-%^$<@L42k`RcMC znA>pp_H+O&X;g(`TYO+N#&UGYvl%tM1N`pXWvFk6<{me?Mz(L|CXyCrsR8&Qy!%Om zt0*=+Ybw)HJFmeZAGi78*)Z(>7&K!R;eqpoAq1b#5(w_Pd->PaK)B}QzO7bt#};EY zuJHq2aCcSxkUTlzK|h`B)ruC{l!wcdOunz_*3O+s`m#TksbYGA7sr3{ z{K!ZA}?n{p_R^IoYZT3^j$3)GKSmK-rX+*`=2c|lx7FfHCS@_%vE(T zK6CoS6QK;dvxHtFfp83e{^gw^5sIp|?xT(Yz8Kh=ty4if7iVXQN`2eE=J(2cf6&{w znfP75|6@S-8h~@}{yOv-x?<8sFv7-34A#24N85jUL4l#$L4rCLW=GGbok$FZ^XIW@ z>ka8hic^^LmWzanRSoea(f{C{PoI$T#bjr{OGZ?y#t93i3*y}o=&;%N$j&YZJ*S8J zdJ}^X(M%Vx-$DTc(b;2Wy|MV-BHH&oHXed!O*S3RH^ajP*3nxRJrJ*3JKf?R2^z(W z*}l3UFuXRatTpxoo8A)diE)20#@Qmf#TyZvYFs|?#Qf`>^)DAQg8`|EW?6b~NbXFw zt7@9hb7YEjb!!)Hnh_cBWFPHW>*USbZ;*g%WuYMqe zrn1`@C9w%s3*tV}8U4MULBYSM^oG-7YD6q|6|2DzTgf&dg;=fBfc1L zO&+=(g8jEy1NYlyV8_+*wyo4@F#r8V&Fyj#D6YFTr)um)dE|KSe$g;w^_iO+-}Hw_ zOZ%I;4rxYWi(9fbu9Da_4RqCd(lQukFbZU{N=dD|EVit!>$q8-wy*~m>V8-h?3F2yhrFC4FAzLw?X1e)9LcZYu@ z=97;Qnv05jz@+p2WJ04KtQ|M5XWXBPmGhJI(z3o-cx)H7QJJXco4ie(W~?wB@CZ2` zqK6SGYDz|na7-o&mE4<4153%>Tf1c)5THF2xnL9ok=4+yk`Q+k%b%5>TlU1CGc(+Z znZN1M8$><+!+{W23?VTgG4efXo40(svh?bi?Jx?O+*YftkWqd?= zREZ7cUw_>2weA4hh3G+{=s@hg6M3p*oM=yN*^g1|E<{)HZ!g&`2{6i*xXeDRkDdd3 zgLComc<|KhL(GP?^Y2cZMvncP1F<>RWoEj}4K;tuiS8Uoapc6ozSUz;e*lG(|u=+bP&OkH^T1=5Vd+FT&Lq7dpzLuvso-B2>Oe6 zA~GJ<;HlJ4AJ;QA5EQIRtN%odhyNFASU(zzoUw)G){m9l5XMO@?#MaaD14({vP&5v zkm*SDT~iw)GO~5Rzj#usri{p^)j{EX#QpDcgXs{}H8l*3=5qS{4MgQhnb|_g2-I!; z7GLovh&*3>8m0M~*~=QwUPv!;S+U}){p-$`aKe}ZrR7eSbQ}`!m^gbe7!NKA>V^b) 
zqPxF+zM3uoL5aTwas|CHd{@A_n936Oxn6%e)0KpjYoarP9!BT$BQ^`g?jv7 zg0SP(hb#9S)4?0@p7GbKY;Y~d_Bgyu!;Inkg*z+u$bFLa;*PW@Cf{D+ouwS(gkWX7N}L5qJzEDyBD_}hiO+L2p&&HrrcZG5G!Q{FqBf#NOp!+#!^ z!oQPl#^=LDd^g__A|cTYV^_h=btPp;VesR;%aDd^hj%K3u;suxMX^PTKL!y#_l>-1 z>oHW*%`#w9f`d1o%+_V{O z$M=Di)8WR>Fnh#wOApfUM}wvM_lhL3o@uY!Ru9DbXI1 z+GV@xS~U2(Akvn*NuM_gw|Xn3Zok+9>(Jl9PkwAB)2aETL_K&&tjFoyq<4oW6L%wC zXVdBu>wt|f-murG!E(kaJsRu7ICd|<=(XHYsOF+a*2fPYbdwrWh<4Gk;FF@yp9t`+ z9GJ=B3CEVUnUh<1g5f#Ap|&6&f%WR#qLn=nNY%>Rp>~Z}*S*KlY+GXtuKi5anzQjk zVxxYASEd@i^r^6`_Nrz1|X-5<4Pw8!0>d{3Mh z+@NuL{#QA^+%bNEZ^{?oCZI!9=zuHPif~42T+FA zXz$ahz>ZgetP(^t2>QI>yr%?8Pe!aI(lx=ANO+Q=V-BHnRq`7e*8D$`UbFoFsN1?a zgM0Kyme0*3>{c~-q{$VIwhH_ITmbRD5_CNn_ZYgfXRv*JTyNI8N@l>EFjqV9g%X zr5*EzkKbY-p&}Z=yNB}M9uG#%Y{n%zHY#wd?1{NO8-V+cpX8;_#=`2iz_r}C1Gr!M z*LZWC51OqfG`mKL=Qrvn<-B(lL)Fc|&cW6W-?uaHZ_lGb#+m(RuI%tZq^A>q)*Vaa zhN-)Ff3t`BbB>nu5B89t)-Aew%MRzBWK%UNXQF&c>vgP!FVvqYziMkH<_TA)2UB#N z$@5E7-NuCPAf@NnC$1;1UX85PDw;>wTBap`WFqk4qN@Bubr@A zy&Z*vNGLMY7V3S~S zUIq`T1GI-Wgke-tS!#ZPm}mP>e0^G@ttQ!Zf{&P|IdiT-mC6fgi^6q@Ti5C&EW*B@ zy>-YP&+LLtoJMm{!%*fmOgM&XN?)E_s=Erwc*Pp4r)4NQPZ4^2cPVE0L+67w=VST( zsRg&FLYz=#++xxSY*@K%T)|_Hjj!I^>b4~y_-VvWtpy$QA8rYWBGxw?dh6i#>U0K7 z+6M=I1SG-r^_MojbMCl2!R%1tRf2Gd!f0)V5HxPzP~9;Q1*0Vj>g8Ew)PD*e5^N2H zb*HB3lL2?+OZ}eN+U`O0dv>;8c<&BYFWYvPUjeZ8%lCT3;)6!(pB$~)v8cJyA~w8% z*pFy`1?8SpcvZ-TJE-l&luk_&GwK)H7nMxwK?lr#CFBK52YVDuZWd=B*oQ6^Ld! ztk!)b6_QWte(kyE1Bo-UcGU)<7*8vko%zTKPbCZIXTiqkRw-sSpgW4MZi(+i*h2A@ z=~wH{J4x8i9I#bOED}vgr}V>$JR!nTU4M+(2N&y{nkVk6A^O`P;n$z+vHEiV*ms%$ zNUI3hl-&x%Z~lfWn~3XAOUdxd)hFIKdLdN$UTi3|I?Z~uvV)Q9|8`&Q0}m*$8qcnd znL~XvlS=MZ>_2i>?OIm7cRvuRR-=vujtV##F#n){SO@C#TWs5I)j$N+8@o><3=TgE z$4~108|QPTPJLvt>PK6C&uo5Z7J@{(*6r~i?$;wcJ^h*+VaXayVd3&dknCm_RZ>Ws~=9>5Zc%9vJ?((zhxam!!cc?G?ll+ z2eHXxE2f^i$ou1$`k$Ysxt;N)Y~oYYqQ(EN&ubc|rRNr_i2B~s^6vH1L`zs27<_rM7y)m^ z=xis5AT58p9`9yfSW>lFj5r6vC5l0owJHFW+7GTcH|ImbBJT-ntuyTZl+c_4l^cEGwI5Jy<(lb*c z0uOz+`8L;DqIS;@xBN?rIRE6@b-v(4VjRwrMT_)X>=(C6IcEb_VpnOq+}oz}_2O_4yU zV<$YI%YJ(Ab3I$oN8c~ouf7*vHYPg`-H8F?8SP({DQopy{^zQa)I<#sBet3Iz}{U95Jg-4>(*KGq(hKZ!SPsDTH^Gy92kHb+)XE-VG zC>j!vY#+QZ@DmRQ67yKddML>D(Xf&=pGB-LhkMfvyZVWB zN)8MT=2ge>^V=1z<3xY>P|kXW>aEe(y`jyn!YvaIvuJxLtzy8u;ov~O?mj3^#RQ-0 zmBsD*tgpL>ffU0T&BDa2Qn>hfv%N1OuE!V2GLt9#QR8(dR%kj5=X9j@@f;3C8uy_- z>f9KN+CB8HWDCdnC_z8x^$|F4v#4hp6OO$>uJk?rDTrCN4B#(*iK(k2wM7EWa5%`J zks}y@o{U zZ#bKCzaOS0bw(!z`*A>uQ%&0V6dBV3((;qU>)}mt1nw`cVla`C@|A2khQzxBLq3*b zv{>zGh1kFLnc=flY^nZ!FeuTzQgDffbpHWe@4^HOMGZ1!vXo%zmy+FH+pTCPY<{E> zR0_3g8yH?D6Z5rO1$s`F6YCT1M#YN8IO6)7f+qJ{rdYHc-H|lvgU)RhwS}B97~8ae zaE{Uw(Nh<;XZyBjZ0(>;-*G-iXAM}S%c`E_OS^d6@ z)b52$x}-uPJ1gq<(#Sjy`b*w-N%r@@G3Q}^&K8YyoG22LV}4VO#HPxN%^cNuY(aZ3 z?HU2*t~Q24tWhXiTCiv$`q6h;7+>-@1)`>6DROzgC-Gj?r}`cDP4G7Tu1Cp|4;DF1 zHI8dhpopsYOz{3NV%^rR{faGxV7wt}Yh7}X95?2whpy5o9YF7<9onG=YyNn3TnHOs zDiMO=&3$sB5vY31Uv{8-EsiYq|8{!(XaF?liu!KFB!kIIJmgiiBT@n?TlItDkv3yL zV0zLW;o;t2@`?3+vR9w?M>0#JZrbGGRj$_>3-klS!Ihdw&18}IST>|_X1-~w-p)ug z%6WI(&xrwlf|Bg9bAIShEtLqV^gs^(<@jq4)6s6EceyFR97goi{X0)*BiLlZqUL!H zM56vA*ChnN;qSfIws{0x2wqTA)(L~v)WpZ=zjm-`-u7&ZpFMaIm-N1#&nD*c-JiVq zRD|IPO|icT-Q@Z4R1Q$`IbCmEWOu;n4b0+)?*)Om_Z4OF=V0*M zf4jgyy{1RfI#^B~@Oylgnrsgxw<{SE?+Ot8nNVJRMork?e35sfDI6cDmMh|IhjyxKgZX20tqVx;z%`v$Vb5LH zk@?P^96K+UnhMiEi<<@hnh+Aobadghf_GTII)iE)^d7SaGx-ziu71So&S-f7i_Xp~(h> z^ZP_4iTCh^2{#{4Z(Z{%NOIPcT@jYv;*GnOBkpl7Zg_pjW>DBl0Mbmx%Prp)G;Dr| zj9)mP2>0(bl@7jixPRC*_f(c1m|DiDyn|!F8FY8B`(Y4@IyB~~Kb3>?P7qUTV-{YO zw-j(3C18Wn>QjGW|CW8JV_^ciaLdRe94{v3$JH#xe{GFHq|;Tw)v9kA)-_cl2jqhh z9}{w2n%){>{$vf_!Dt!)O 
z_2b74pv%YQ#QWbptsU`9f5Uj1IUB}gtt#J#+X3m+KTWc96IdSS^qBrEfVS8@%2NWy@YmDT6eZU2p5Le#Uzj8UVTZJ<>IO=% zOObEkyCp@Iw|_`3ezaxne#oBKrHnsxX*fsSa5^xC2F&q>cjWgfAhh;cbm56z;G1#c z-FR6Be=nZkHM8S`R`&A&>mR$|ZT|G-jFl=JLc>gs5bx^*_Sv%LNeW>decF}Cb^qp} zF57*4I4kagZc2&d;b0xSuZ%mS+^YeG4_nP6iT6Gv&>a zLQe#5h_4v=J!zgdG12fonqMaGpASnrQwJ0_qkWw1=<{waGCe;JgSEB3Hf#<*rP}Qx z17#QcvUO#`Sh@Vu?s=yU-fd8Pzb;f6k6K^;wJ5iNsHsg#UHrdwJ*06-s7grsA+uzG z{Ru`ZjCYlxqB`O2OW-Gu`_QFgrPxmnKPw}70*s%M`8;#_F=3*E2BngQqAvz^;A2tw zVe?08{F3@7q3i{Vpq6eisHBL|-%pjsHHuy57@aI(*k0`@=w(G7?}USq{_hzxw71i1 zFMig?kgv6idaM$%6ki5Wez1dd@`W_56$>2bpv@h~^2820n&h~*hag)r<}>fZ1tmg< z%-?GaxL8qVb3}u9AD}qz=a-PRa|;qb2U`|SC!`wS&H9-q9{zi9Ep9wF@PI%1OclEy zP#way_eN*G4jv^VsT~RD_|=YAR?TZf-E(YuF(U%k7sXzk2D)T9M#2tU68#eAFaTr))5lua8VGhGfE!%sEalBgENUA~@FG3C|U(?Xn z!pNT1PX3^^doQH+&4o;Zf>PQbVcd^DuS;Xr?$?s&UNp$%#WY>Q?e`TF+r$RQ{7&cG zU-<23iZni(kIb=g5Pp|2UVg#^>pqK52ipll;8%07m8CL9f=nLK&52_?eC3wZq$XDG zH;Ob#3nJ(u#XX(?1|)qJ`d%QckNQS0rY};~@ZEn;aP;Haz1{wVxnEmvxZwv*wVoGb zhNaoa##1Q{_|37cQh6O8HZ6=cudwgMO(owg-0Li${$QxxQ;#3ij;ED0atFvrlBcTv zn}5J`Hlzk9oowiG(d#8sQ)QfsgH@Y@hWvf-wTs8)ro0%|yT4ku>xn&CUh}s!`ih#L zH8k0t2Qe=2;GNIcW!7>o%%rK_uxv0z3FGUX-y=BjI>u(?^5<>{9*d9p+%N!iSnQ4T z+Kr_wmBdd)i<1OtdiI^ISvuu2?ysEbGlG|JpuD znlBRG)`@vvT(kDwlb%OO*8U!OV}%|`t>aEBPSA>cwxm4C4}XKVMdnr}VAk^HWg*t% z+jgaQ?Tw+u$`OJWo0=k7&MH&e&sYB_;Cl)k*O)8uUVdDj-%_>$nxA;*_qdqj^Q6Qu z$6;-hc2(RB|Eo@JCz~&zI?JaEhwkd6K}~%;{K6{Glt8ot-&l^QM~Rc^NPIf1zZkQn ze?p`0rhmjP?VU(5=oD0^*M#T6#IZ^%GssJ96~CTJ^dB@E)wnk5V791*>$cD?v@-qW zSFGkkR=&%*q1=7=n*S*5Tf}Z$I-vW`y;lbTnFa0sCer^ap8^)`Tc{QF_h94XXHM2f zI%xKByHdVI7lC&j>L2wfL$>zN$AxM|?0A0G;7ISPM!Lf53sT}z=t}gU?0?G#6V>6; z{#q>zXEUuN-x5LHt9#GApD3ZAc!>8a@!oFL^FPrA+Rpg8ODHMvkThfh?#FbyQ(@@) zjaH#-XYdhbCcYJUBbxFK9ovRfH2P5;w{A&?M)$=&n!LzHnn-AE}f4Bvn6ar4u}OOc4MqEInpB^~Cn zUQtB<C^fk;q^l! 
ziAxR|_}2M;!TGT?bpEo=m=+7-k)Hcxr~)^pd|LDz=6+}>qcPD)j~LGk4|;LEVM3kg zijENV+P$WPb$m2`0`1V+&mlFC3{b5I2pgqr2J@fytEwqh_?p`~6=(iSBgWaHo3Bv; z=G4F5JsY7##x8g3R#kma@fC#54ogG+evEI~uqatR&3iYgRMgk}y_nRoU2#Ecb-wb7 z`kTdqc4MV|VkYL1IHbHeR=lJhCP}=@!$D3dp9BMK9HJBog&;>EK!Gblr zOww0gvYxc;eQPjSq+1>O^OEeJ^et_h)KA?55_*r$H4}bzg2uD0W3KZO+~z*D*31#> zJKu4~j##^s=}7b<)e46DwYS4BHF1K_vK23rnZvk$Z$+G9l$ZTUO-%db+{w{pBi8@F zNm73+kMpAxU+%8|*S{0p5WYaoAOlwBX{w)FIM9DgjYD}t8$#iescf@!K=Ks=QuM86KT#&=HyGu8h7v;(G zL(*T-8O_9Q_0Jll@0o<8_fh;-iT2qA|FrvWI%?g0(moCU?&pen6fL)o1R}ys{Jh6> zKi;?OcdXJJz-Z_cCH2Z_^7_7994%7Hsrk=1KXdnOvp)21+)d)||1WN`X^!XE?oJ-( z-NWgLg}LYCbwlb;*!kI#XrC_(MY`IIm6#xBXMegyo|fG1i^0W9OX2^<$p(!J*{iz9*dU1p*Dj7BRv z>%gbCr;y95pPh26pR6BJJHtl<*9DuTU`=bQvQJ_?xj)r~X$cE!O+-;`U)bj)M}9uD z{B@a2qzEYce)rxPZr!i8c7N=TovNub)6+B4{r2gT z&vWjiDPq3fboprqFDPoOG}G_h4u5;wn;S)PX?E4tgsPM7H`5XMuwOY}LkH3CM%YLG z=%aj97+c-7ag_o6-M{&j)R6wY+^_tn0akbHYm{sv{5|J&FKK=y0ykGqx{b>r)hMhr zQG6HeIOnHy9Di5p!>q7Y?~a}vbvHGUq}6)|4GC73|;S?SH_gl=H9VlMf4N}-ncrgi{i4?@`ouJP^Z6d z*X%Ml+F5v&eqd2MBl&w?5 z!{mR*b5$yBSo3!7f}(_6^QC<&v5t>3YrlwXc?^}2voG~~_d7(w4Pj6_SXn(xm zPv0DDtd6!IxB>3Qk|UesQ1NPsiBTwNss69Wui9|u@givyh%$W;{a}Voy}usp?cm0F zWer9w7X*t@WM-JlLL6_&VxJ+i1PL_cXD+L4lZ1$)sL7T@U7A00UA@@;)=8i_E7U1= zh#&KvmpotoP=#%4*tTEeYcQr`dv8yI5;WN@Yvtv5aPhru*2v<8`#^7z>f_K{0qkUa zetox$2P{oI_-@b>ys!Uy{HLGwZ#$3r`TvOq+*y~PY|n*vv5HCt&!nOGSUT8oW;=Fw z)dgC--H4}c-K##1N+bCO_ms$+093>xvf;=PKzn+ctu_;lFBtwgTlCH$j3rcghnta_3SW)U64TA zaF+;L-BzYDZqp!nh1-8(#<$|G(&6Ig1m}P!`{Ig74JFibzE&~KmP1OxhF==kM*H1^ z%Oi$PpWTOKH!tO%+S+J6mhM=2OBYY|O{{KoZ^xKaOm%-l0*o`)1e~6J1iSp)k{t6R zwEL+$-te+!W(MuO-nIjM#l??l{u9-Yqv~NQrTV2S&+0y%9cD%AgB<~ybVN?O%4eJ_ zjiK3FtN9KJZVb}IrtRu`BntL}#do#3MCI=P(*HkHUO4YKht<^wnQ*W3`F=JCX?^gQbi2X~^7@80Rg zk5X_}QlJyy|F_@!zvD;YTwc3&H+A?>Wd4r$7=hjH*>2yvM$i~2&+OY|07v!j2ahnS zAnL`Wu1WVUn0FOP6w8sgLR_wOT>gUmp#DBpK2qQ3Pc)aAF$hD%UfAEnstEIY%k^IC z#L*~qe<}^Tp?vz#4jH)CJgF7q{z=ob`M;)xAAi|_S(~@~7dqB~GN6~&DJ_dBu4CaI zJ?f-?)`eba#~$2tNjYr1=MaSJ^g4$63C`5nm*G=N*0>h1QHYM zKg;*)rD?EZm)q}1pI#G0_6UDr4ilm6x8+BT(`t{6aQ$PLdpAWD^dB0t!?|~2jV3zX zD(6(@9l~>eyxb1Y$KIEV%*3!~<1G{U{R%i0J+-Lm`%bt$`MG~{o)i}^k_V^AfqH zFjFD4k8?Hc`cSu1X{O(0q3g>Q&YOCjsFb>&!rU9PDgW^2pk5R((5oArG=8LFChBR(9;++gzkuLbi%{Zels4HvaT+26@wF7RrXmr5fsnJ+AFH9M|W7^WyUr7aDCS- zQE#M${=71+t`7@*9m+3shBHnF5543sEM2ROFT5;VmVfwgeWp-FD@`7|SCvF?O(>wQ zQN>gxcrUIGoR)90U%(gYWj)(iSZjgk^@Ap6`L48ekBILvsrTN1>#p8GhgM6XW2kDX zjYA6Qk4(gxuc;yNV{?u1X<_UXuB_wi(Zw!~4f}Xn6fnFj#QWe*6-aDI{IF*^5NT}-#%p`dVK0TH1hAG=I{kQz! 
zp(6{|gDYSut}T;?fP78q?{+b)A3sm`>bEqB`}1#qd_oB##{TwSTeY$7`IU9^e13)N9Mpks$hU z73M9ecdAqrk)YeaG!?fQk@ZH*Eyww3*YkfK|Fq7L)f_fv$hg|&uw%^lQMl(qUW58OzYN2>cw4ukkoA7a+~9F+nXWoFHF<{ie;ASHZh3_^-lX{v)VMFzzlfK7{4jX>3<@W@ z+{*cdz;alvGF)jPp854vx#FpZ+wo#egYfsC+Nd5HE?5(|7=5g7q}1ue!L%%>MJZ+> zUr>L>f8^~BZKzOC)afgLQN|vlel~5Wo@X?mjQ_izacq(H=bw#HG<$(6hqlg9DlT^_ zlm6@(6Upu3IJnoA$wq8JuctjbSN-FN5qe9jJKg4;5Fe3x?{W1i1kB~K^GY57GiAG_ zs`75^Tssmrl_rRLi=CI2TxX@Nx8aW}|7h(6xzKu~W&K0OyjU=oWs-T zVf-aJmVy7)CIBa4EhnTN_HqpysRF=tDPu~~Cm8Ts382CZ%VE4}c(qQ-;3=r0L zmqDRPH&a!&3dDr(NLFWSL4Ji#U7MpC{C2#5elcq!95(g#Io)Qa9S@aHzL$viALq29 zZEw`q-}BI16&vm<$HesP!F*k^yX*7;*fE%zmR(x74<`;BSgEkg0_`F6N%@m|aUfEd zW0kH9a(#@B$+{{cXn%E!^A>4LJ^Hfy;@i9bvd8|Xov}2-@%NGT8I`n~C%@PGYk;R; z_0xlTbEMVv;%>PBevFvZbM-IyXO*`N4tW`&|6M{%th zsQy5Wb{$W*=YpR+D-ir4QSkucW9oO8o2z50=&IUO1sCZzh~IIKTNwdcuDnhEZ3(dk zLF13As_6RpLVeV3fv>3XA1W1-(7Q2qOASRLp=dF-V(}{YN-xv(?cO-e zzT0xP?)C>uVJuVh+V<&~4$eyJRbJPX!$hOugTJI7VREI5%W!`@j3|-6i`>JIrDj`Y zP#*|=8>U=Uo)DOetV?+mX^+7%RjEFIKh$g*8XOXLKvVBp!<&i`kmOiWxR2EPc=HAq z(9cDq|EpM=+cyfvT)xQl>r)`?{-tG#EeO`Cm8C+Zv6y{vk2`oK4evyHQWHmguv$EC zwX<3vmZrbFlmGV!uI9@K+Rp|Zf}`RMBj= z0bL4SIi-)XJ`2Qy9KZW?B8kXT)SZedrO?(l?9w*XLmqxm$lg^u`;Oq&ql`!^8F zORqb!ibg|VMM2IP6Kh=At*>2rx&ZaR-ZTD*@rJa3gXvv^I2e`mZfI|dz)G*r6|Y|G zhk?7!@(;4{_$$Gq(^eY*it-6jJX?f0W-zKfwZYZDT?&pWas4c(SKmxWyCP><%mvw;i!tX`g8MqzeXatZG-z z7X-s>xMU5f&*cppO6KLT#m-1Ob>3a1Zznvob$Eq0>M|PXlSm(vuwQ8S#)eGrZ$JE_ zOd=JGB^^xdtSR`?w@xsmFcITH6MjP10#Kt@IT8FU3|9?p6q*`cQF&9YL98Sb!}2|= zH+fRvo9ejM^Se1rjG}{e^+}&w5|?3#L<;;joPPIHkl;pTpIQ8w%ufhD&p+Bo@Zj1f z%letsEzps0MchQ(0&W@JK9A({&=bJveqZ1?>ISAfxh|)n#Zbq?`CJ;Bk9j43dR>X8 zV%zN99hIPrg<4&{uOlvUtwJgcUS#N4%ET5ji>5tlmqZ6nJYk)R}I@nPx`ERd>Y?skNWBS4u!|8 zJe^?R5jY=K%jrE4hZ*j6g>y|&XgcDO4uamt6C2_D$?kwstZ*e zMHKM8s1u+5l@ET01EXcJ`H*Ev)kHX zUN;?7_jMSos7lAiGr4rf2yW%ii71ne9+t4Y5-@Er7L1+l7oW+sm%}mPY+tdKF>Sp} z9tE=P0t=o^5zXqi8HbI`jv0|iBufqKKIUZU#rg5rA(9}nMJPoq6;`qD)@ z$HB&OHt|Y$5>#8aO;w9~VpjHepR=kPs_)(jUl;3zlM1$KKONm5y7YZ$KAEqPy5_L! zhlBuhX7%2GL;9j;ttQ;6lS9#I@2bi-nT|DEf@bM8DWF_?dUkX=8q;t1g?QX4ptq{n zoRv%Z{?9#F;`hb}4R@}-nUoF2cBasFQB`&%EHNF98V|%0KBpKpjwlijuqkJ=_s8V; zXOHq_q;CG4Nwx|uVuz~cs2MZrvY2gQkqX(u%zPO^t!bQl#KMHi$v}F05V_|L0N1?z9pOld9VALixMNRhwL&ko6!Q@w8 zSlk?m(0E7mz+1beF($E?+D2DXOZsar-rIe7G&u&<$wps-!XwdNks!a1%OBB7cc)&E z`3!ZYa_dgKw18qtt_tbu#4x|ye8;nzVW8PamBJe$!ssU+?ZqU zD)vAf+eMXfMNg1G|8K_%f7s4e>X%)PfnZo(+4PSH{COR)YFE4$x*LpG+3W~zaKG&_ zBZ31c|3OdtxGlluUE|#8xt-WA?4ld@ev83G-z4iRUthQl?(sS@6pexR8b*PfA=pjd ze)xAB!E;dhxQ)#^0d+Hi(|$ccSgQLqz1_uw)Js<>@J9K<;qZp6lN1*WZ9h0&^pNNU zHLjSqVZJb}2<0(oCG$Sw)*RAUo`Mj3HF9Ta|Ju}#ws@yGe+)$0>b$bg91$s{mYkIqeO z7Q!6U&O8RP{}yune^Qz6B zx*|~?kZwAy<%Qd4hho@CKib^cx2suVqG0YlCEoHX2|dTB1YFE0xM;^-du}Kh+c;Am zK0M`yH|)JFhYUEd`0+-LvA{{MZ=Et<#(Q{ zsnCx_g5)vld0l_3(Qw-MVn+b@xthf@4ib5CtaMTL*l|>G!YFEAER8zaeY$b!ZX`0a zltZiz#v_5r%S=4b1^p+Tnp{8n!BCdC?Xvb*o+o8{Bp?YNdpz02o_nHzL+gsihg7Vn zX}Z%w`qX~TSVN30`9J`{gI~woKydG_%a*2mDn10k_i7$!(>02kxuUoH;+kv0|lrV*KtP1W=-#6(2cc<@)ey;Y+^A{_L+zpSl)uqW4@< z)$XXs?WvY2T9t$rsT6@{D_v={f2MR}zq>z~OCi=1U>}B*`y6ZQgL%*l7y0i-yQ?J$NN>*28MsI0Ae46GbnE5e*QMt29qta0i@wQ!EE__JP~$lMBgc%ChHQey42@}GAK^`_D}mMxaOqUZ?H#! 
zW-ktF@9z?Ah=yt5&*jBL?ojm?l`ik99LkS~MNjb42lMajiJv+jQzlb`zUPmt-<=6T z3g?hX1evcw_0xqO_4>S2_eUPX{4PgQCqI^F_D&@?p3E_0VViT_4gJ|Ob8)f}s0kQk zC>e`F?Op>_qpyyTkn=VUG6;gpyzT+kjfAh>my3)O`Rg}!`PNtiso(#gZ)14Q4HAn+ z(u0{I@%Hx*-l((L*m2piFw7?v$Jm7A9LfA0q3JT-ky1Y}aU^TK$qs{lXmXG}v9lQN zPUbEnxIE)5;;X!WC4(!Se{<^BAk0$ye=pK>!+fy@LtuFUtV4B-y!RZ$3-!-8?H+|e zb+zxW#qAMz^yjsw^@~)zor?0be;f=00gh9lX~FQg;2uy)`qx=Lu6oscfdlpvU8yN+ zQqj`u5d7I_KN80`_T67xjtLtL)Ap^UNZ$9|WPl+M=?<&ReaZdvP4G|Va7hH%==bQm zT}#GWqaEjUjB_z3)I9a>VGtS))Y%`A4#!1@K56@KU${~j8pNN6()<|>-)MoEdVe@x z7-yZZ_9wWZB%RKi1vA@`p4+c%@$E(Ig>S|MDB9$f(bVh;;ZLVIq7G4DvCp3MNnZed zG!!Si&?oi{q}(+2`yeS$I7zBLA8Z$v2yiu3LUI1mXVa7vNOOM_oXcFumxv9!EV{n> zAo7=3Zb?*_R9VU@lo!44CM{0H_=E0uG?6b2lKDH!&IF-)_jmQeBawI;SJ!G!n1Bi|6mC0`Q| z{q0(|XqPuQwS?xDuk=T@t-Jir9ulu8GBMiB6O0hs2Y)B|0+DG`_scIh1n-$sxcHxx zgX=PHiAd6Mw8cCf<*+D(hw{=!rciH~_R3c&5&M$Kckl5TA7><$_ops>R)E$NcIkso zm0+mk8`g2npy`c^hGj*ZLZLMMkhjan-}>l+ze)AusCt-6seb7Ux28&$O+M%bbf<5H z_+q9(IaHnC83pY=u_+?Z8!}CW@gWZ?>4;v;{XzNqXl zHVE+A^+_dlb;!p*GX&pF>^#4rNC<3~OLA|LC%80iCJcX4!%=lN<#y_51eisXPfnZr zW9U@(qiHJ&P8>5^(%O@Vc->*1dTSe$xFo5zlKycvWyMi$#$cM9e8nBcB~JQ=mKbfn z82vI7t_?-ura$5kzc08Taw62=6!}ln(wQ&jhb2++hnObEoJZxSNGy?#9u;t2_oUl7GUzo{1g_W^2P4 z$h?r+Rqwy4ULt<4Xky3PcjSIqN)ehW_D3mab$})D|DcirNNgea|9p@QHT;gly+-(N?I3!))?i(9G@Y_SWHbt`gAo{p-X=Z#l zGN+|v{jU=|F;%Z}+0aC|uQ(fh#gNQXxUZ$v{MGNj{EFK&IYNy`Hy!$8PDk$tyR(6! z>9^dmw5I)vxswO9bk5&e<`7Qel9pX3nmqAnp1I-KKqSmHr2oh#<>1&}x=pJeh2X9v z%cBz<@wDs1ZWY9Jbx05&>U&Rb%MwENMrN1yijBvEYv;hS3aw0J&8wWxBmZ~5|Kx-FWd5hv*E)$em5~_yG_8H^TR2SO zE(EM3I8Xu=J5AXSAgg0>LY-DHnz?JOQ8&6=h_5l+U4B8S`mYM&r^}*Tgd#Ozi~Hs32t=a6UwhuPYJ(0Pca`JI*F$m$0={> zh~0gBip9JqA6fg894^1hr`g|BJAB6%_KLw6`?J1q~adJY1#-j*@S5xAiLG zAB`|-?ujOILZ@p=g@=7`D`v|wae`N7S+ef+ja!izQaNBkcRv9$#$UJ$7T2oPD1n^m+2Fiz z9$H)|e==BlTHOHOLz{PXNg2@imdd|HYO0pPbk9`&r(V|hI1;`6!s!22KW}}jkt$=y z4~Ei@SI-M3;>v|%zvHe@V64JtRn~S2o7TBRe61yY%lT2ktfX$)GH$N#;K}WXpXU7@ z|I8X9%90a<-EnBq^UnA@Na807t5~bTe84SoFMuU-5A2?>cWARv@Z3PL>}xR7 z3zsu*o-M8OK;NV9OG93>Y`Bt1>9MkhBe>p5TSeMh=W}`x=I>b*$FhAz3EZ123dP@}UR(Pb|Cx23OpzC9-s0a9NbPY^!xWfJ_(*-wzBdowd&(kJJ;vze3 z5(Vl=ern9H~rI0>; zOD+y8f~&H3{>Eo>K@!LR5^9zy8iW7TlN|8AU~$gCXd!Oo5EJpF{IEZ=4Da1b)W`*6 z6Wv_d{ScfPF5N9sOzOz{#1}D1#bflWm(`j}No20F$CPX0zwJKGwNoR9UxeVJYxmb> z$z_;OHMN`VkahB}C#+{EoPn{K&{N#13yX4`-ZLwSx<<%GjWA z5G`ItwbQ=|bv6dFRG>6{gM55|9E=*5Ouf_&!vpD4nQqxBsF4^fUp$=vqjtLF{c8Rg zXv|VM<&_9Acg!g#NP5-ALz$#{~u%EJ?-qBJjtx~4X*62|1o%7z?Dp0 z7c}w{`e*98U80|`>wivJca!xK`Ohiq8;$%nktyr0|DLZdO6IF4{4;$0|Kjn_ZvXxH z&ushu{{H{f>wkXtpXWc5;;FBx^uKTW=k@>1lltBN->IKipxx-X-QM$$mBg9-dBi{@yl0PW}OIKHh#3<}76M WR>^JKB)3WW`UC_@`$vh-c9tA>G{#hwkn!=|({Xkp>A-QQ-grf`U>4N(e}| zfcJd;{+{=~_ul=-dCs0$v)9b*nYBKjwPr_P*?7CS+F5zI^V!ha^|U_&42NR_?YQZWi7?=ypg7jekA-|Cd=HDQ(<+-8=*N{#(jtfqo0< z*Gynx@!x`fEiG((JiG{yRR2_{>k3Lr3X3DDW0BXjku;%5T8@8@wRZBcaIkZ?^Rhw< zBk3He|Gky}YQ7PQq~|ciLU+)`E)dD^KPqiJyzKbgd|iE^)#Uq)ukt`glSJXW*W09=?BWdds=xIlQWHZD> zEAkaV-d4p>y%LXP*G6)1sA3sjL6_b|a%yO3{CivfepE4hkX+V@vaj(mFt`L-`yHF4 z;|yJwdGUg4YRdT7qh;QTj%ETM~1a@wa1O~Q+xp0nRa@`iF$YJ1H zzUu6w+H0{K-OEXjGKrpiVYFy|PX1TjUYPE(=i-WCHOq4;!iI_>3xa{mcKt`$1MDpi zv1X@|c#1}%x39g9PMKvA4yMoAzUckHdw6pqt-Kubvrv1BeF*vK)fELjQHr7zPBV+| z&c+_m-^tAX&awV6&C1;olzbE}M(sI*}sYix+KI<&9T zgbmgFO>@aXGXJhze%9IQy1T%9(?L2z2^p^YGqEu%eYmYw>*JgJ+Y}rXbc3z+&z#%X z%yah%{AWUL`QI}U6ec{UEI#I-)EY0nL%q+lF*b94dMx0%g5#{l8EQ~@LG!%lhsc|^ zqPHXS$bDpv;wDZOsyX8m_m>QfdRKpu5SnculO;)4al@2ThS?QyPxXZ8a!mOC$O#I) zmr~Fpz4w5dE(-T{-QeY|tZVmX9d_?q zp+K2bP^!;H@QPiq*Yx+J+Z`f*{x&%EGFZ+n{_MJu85;aGB08XCc75aP+S}9Qy0PA4 z8_m8Df+M7UN~w{3`s6}@_}E`6X5F}*G(UcedP9Psq63C7=|=@j_|E& 
zN?bove8#wsG8Z(#imo-Bq%Db2ZhjkwugfnM_OReL=hfvfRa^Qle@HnPwmmcJ?LEc+OqZRW}~#hYsXpHiqWZJd9w+OMVZ@uyx2r5Y*lRU za&>uMJBhasYYQ`qUwL>xSI;b>`PotKC*t>RO_~qpcXK%Jed>>1%hasMUM%d#C>`gp z_v9bHCq~lY>xSXc;F~X?!Mg4^Q9<2KW9CLnIk8kJFd-tDoE8KU? zJ(c@GhW%}UrGJuAMB0N*gz56?RjZG;!v+G%qz(HCzireP;+Z?0r>Ea1W`9nI<$WBe zaLdQAID2%;Hl274q#{%4;(2Up2?WZi?=BpUVV}e;?KLFJCVf78;=Nkmqv)KN;@074 zD!-fgWi7I7A@Os)bp2NQX?6%flXW(K$92Bzl`U9$^q&*O3mZ+Ie?FMgb86GN)RQg_ z?|m&*&1YkqB!6Y6IIX$2^6!lT)pedk0rQyeMwFk9$S4_lL;|1Qe!Z?6S{fq}uQ*9E z-Sm!v_K1bHmYChbLL!(qT{fGx8vF7vcADnbt0O`g;_vhgbURddOMybN54tp}r_wFd zu7)mi7ZgI%u+f4=jk)&hRLeS1YM9&TpRK zn511OprgCCxyAPG>Q!zhu8R$N>^!nDvVf#D3Tl@~oyGBX`v7veN<`UZ@H*0<*!~;_ z(*ww_aJBX~4PUA>C3&c@=yE=w)*95UGtFVZ^SKzXEqFd!D=T?^i+1YA{ufK{gYihO zpStW<18z*$*A(Km2}D*~9`5gY{{Ww}Q0A)KMuu?F&mvCFraJhVns1Z3i1~aN2s7K* zZ65Uu8Jp3*ym<3Xi2Q*PX0l)KV?upd4^f`UC7)PjyteX%cUBKtOc4)S$%!`fdOO1Y zVASI0>BRitypd5JuhaKe4U;m=*3@ivO>L%|FTkZ))$3ja2d6v?dTU4*k(h7}M9awR zS@%u1mj6`xFAeY@JadBwSTQ+ZEe(y5>CBDW}+=bR*!+(PyG#tV07mZAF2QKR8P4F@_+b@S;aiEY)`AF z?_`WW*Gc`##1r{QUTfzv^1+V$ZluUP`R-)>ytbP`?KVRaPVCj#lCAC{Hgup z1Vd$tH|@-=I0@Ek3r2@_cCCZzZZ5mOf{g6_sCD=Dh*?4xf;`!m?$((`1x~Pw@sj4e zGcv1?4%|Al(97@4qW3v1wio_F8)mhz*}&IIuC`+T;{!MM+rq3a71P^Q z`|M56JG8AZSxjHm5J#!#q}9s3oWDNba#=FCZ;@GOg_862ZjF70r4!C7Nn5ynRFLs~ z*Zx&^xJoW^aM~wjf4eOz?hTt}dpQy(bQMQX@Y4j&-w2`utJjFGHudIK5#em22|ja) zI|F8NekFJ(*R?y%dxzfMo9_|Gb%kV}gb95j)&_c;@7N9>2$IXQ8`D7m!9 z-wLIEaULXz@p9fw??*;3rT(un@BIMk@v02Wq%KtV#C}si^mDFTuk(gvgnAYZneXs8 zznr2`8I@E}J*U0GSP+KMb4l^KmL`6^WhuT|&cpgDMNj#ZpZaR-uXd3k^X=1wFOc@mGB?lb^nLAEs%m} zHt-)-@_&&P3#5<|0`)&wo(x_8f3p?~r11Y1AVdrNZvte26mev9{D*5r9nqjxOdBbV z2DPd<>T6hWND0UPB3XM0q~w2*>>X{S6dK7o{=XRZE*ir^F$M;aY^Qa{N(`i1A`8am zBLPvBnklzP59Z%U_@_)G;m|s1^VU!RXipe8YjR}5&w@jv^RQTGtXu72FG_@a1(=0d zJ3gRXgD)WO8v^6D@8dnYQHUJ>*)aM9YLId|a~krCf<|FO1FES=aQhSxs$UQRFY}ol z1fSReF7hYp$R`H=Smu^lx~GBm;ayVezBo7`N={r-^@0RR9m*L|H)ynoxKv+K1ER6R zQ`}8|P-6%l`90qRPM`Ap@9`wS(Xn<0zq}u$OcpUd>xqX$kr59X27u)^s9Uzg>2Mj5 z>oqgT2?E^1dM9sWpb_kZo<i1$uV7MpMndA#G)ouHT=3NIu`xc9uy>bTJTQWpskE|Vv#!n7VT*Fysjhx z<&#$r_txZqP9~mO})FYvQmdo_7Y$@Eii0-`U#sVLs^v)$wRp5SA2j`7vAsi^*ed*&`0>`5) zD??)s;URA2Xj76Uyz7@0`;e0b;Tw|LvGV$GEE0%?%a;IuoC+L18b^V%kD|F%VZ7~!XT#CKYsZj0yKVc(7Mn@ zf|9w5mH30#V1diLbg&r-R-*#PDC|He`0cGKmr@8aCqW;bZ{|B8hDD~30;C0~1Nk?{fyjWiiPa7KVMy)R+>TYuOH zV@&H-a0H^IdWjR|9H3&}pH1iShX->0Kk@eTA*+b(wRyb(dVLjDrCm>g0=vX8mex3! zrn19$tyu+i1h!K9d^$jOzAzDY1qs`=##UNpNU;3%Emj?m9OnA@h2=~W0K?7k>*h)l zh*4g~{|JbIDL=B4v>j#83g^4#nM@3SUb!Ea6~}`8wFzzNacLOPcaPAO3Whu1x?vVS%e@;mI&TCoq<# zt`480JIUf!jX{qp{VBF#0(3noC&cmX1KgLQyw*bd2*>ssY(L6Sux1in{#eWdUW*oV zofSi1;725{)tCvVG9qW)D}~@&zFH-NYX#2-HfzZpBH??0-qO5cHAL0?HAprpgg5=X zcf(t(V0VL!Hb?j#tfG?mt}s`C&|-Q7iywMF%nK98Q@F!SNg*KzNg}-HRk~-b9|Qug ze?9toyBgl$_4_`J_Xb9zUBZ@UB|!gX;xHiWA#`chM%V>aLC)I{u8j|&VDXgk`+|Nj z@I=r_bXuD|aAAoA2&jt)Lj+;CCR&WK+bfjwl zg%Gv)aJA!$H)NSxQ(`*kL#lT#H5aZU+)bgie(qEZ)dK29F(WyEcPnCU=z~41r zizmU{-K=|Z+&PeEkgUzj90}favhi4_&5)5JvJo>@0t70pYb-he;Jip*%fnp+2$C4z z6zLcUb1mJ!#EgQfoRhDo&R(F+{NTF>R~ryVJsj(|cK|awyTY;=Z8$?Id(#v}!$pfj zqx@a~@LiUfJXwwhp&%bM>A)m-L`PF_k5mFYK1XI3k5~dmVGWODxDmYj*zlXC5QTW= zq)!-pFA6qJ>AsyMMnZGd<^r>$7Py{O5tW#PgQ>O%&at&M@aF0EFrB`DhWrm{Jz_D? 
zr^!kD?Y0k$51dC;W;sFV>vUC)yfo-vaiO3QO@qJ^=bx#-aVIk%ZgA|2!;ZfQQ zxc&3C47R@xZ1C?{D>fB_+Xt0mjXYNv&RUMA5>18`<4&6HnWCvgVc`OWVM9T7u z%ROpU(cAWO@NKjxh);V1kjIHc$Sc20m*Cuk`dL_&?G@wxVMBJ z@2hv=4QTyI2XCisUPZzEAszQ?GMONlDe#yv+y`n|@)%ovjbZQU&aG8JFZjYjhNSjO z2Ac2;s)8$-kc;J*iL94|=bS|yS|$}B^Ro4g;f5EqMzf5OyJP~2{Ka>3oQJUZJYzEA zSq3~(ZPXq#QurGw2HUyYIOa2dxUaY2i8U;QXMzZ91$1>TrjGvkYs%-?JHCrBo8)rbUX0 zbV5Liv{@ag5(Ugkvp-teL!oa{=Xbz^Xb`@-bWHvv3GP<0|w^w=_>;hSRFs!N4-3BtqI!8$-8ecyRDy#TaU-4=QTAAtc&#^%^hJJ3zY zembra4L#f0Oi#4Lz%WWO%JF45plTQDDYE^c$cEipDIpU`xqMk@ldr)!>GjiD%0&44 z1GZ8ghrsBAjKHfM@le=5| z6hc6K^8;Zrl;ldC)0xLWn((bBYRQ8L0iNvnK!FJGQp8hx--SXne=UMA&u%bJi9W;~ zN`@Sx-&^;%&47Q}R!QobH9VnH!s*`5hHY!?`O$q(z`8gL?6{f>9-bJ<<8Sl{&m;$D+tflE7Gi$h;B$wXO+WdZiNfeH51@j z&3@sY6$zge%>N+T??Z}A>+$I`6e8%{|JkT;8tfD7`>3MrwDnkJXt$ajG)^AAYT@vQ z#OFSRc1ihAQo2#bQRxHJ+hz0?=kD+=!X@k#Dgr)buhg6k_<-O$&YcrM2Vktp<`?~) z0O~HsrC;Y`f#`hpOynp6swNTBZ_R^%_9yLd5&7onG@m7JjbO| zQY@@izS`?O3xKTAM&gyKOn8_VZkH#K2{P&R!}Ec1aGLCuZ}`X_1U-%=-K^6<{=sl& zhFKyg>P6tpSs@|)Vx@XpHyCXF@XOk`bV0{Lp}C7S51cq8e+S-;g1D*VIJcoBcvIVe z!9?i?bAecK#>8lT^m*Y^fF~79o7~be8)k*`J0G{R-X{T-p)dYDo^lBCLlST;Cc^?& z%|bDHANr)TzNY^Z2m%Y4Z_3bqgkE*a2RFU3&COM#ctxzBXDv}9Slk;0+sKjZ>g!npksDzC;VF= zxEmkrD*04_0iBoGr!fPlG(YZ|9g6`5?z)){G%k~2Q_{n-F9umg2I^vbU3iXsiX7Q{ z3mI}+7OygvAVc5pkz{ifOzMlyx2qO{z_F%41x^X9r@cDKuBn1(b%W-U&-@VR+c$Ui zE)%{a2Ss2JbplDdhw9LFAPk(b8_WtR!!uFK`yr~<5R%otB2bbA1-XktKll8gB*>+G zfhHH?_Li=$mRiFokK_qU1{bh92{NoMK7$OgYUhZC6sV~Qpsgg224W|9oZc&zU|Hl> zOMOipvYJ`0mH&JMo7o#TH4}2cb$_~$T+$7Q>$Yp&V&=kT#Y!2|;TXbh;S)at8ebGp z5uV-B(}8+dyi$o=6Iij$_(U@63P`bQ=d?Xl@Q60}7K3Ii(1=ftjY@ih`~~men@_>u zilA2e5RP7F23>k zA!KBAGn6l9z__8&dIEPi@cgCxOvaD4cVcpEISk=o%{jEJdp{OFJW76bvYrKdoPkdl z(EKO0aEtV4Cj*us^)|~_2hiT!CjMa?1hFM`?ZjBYz&kwKT2EDWku0SD{o-OdYF=WB;-l_JSt|QP!J>51~t%or^AHzU1OZfXU z*kJq(3Xy(zIbIwY3St*3Uo-yZ!7J=_tel-_=-9rZio9A4f2Pl$9JL3+H<4}XHgavC zZoSdBS`-e3ov&3IB_biueQ14eD-MJzME?LgNr3FUE#AAM5wP&ER{9R7Etq#A#P2hRKpi{7W4*yxaHT7vxbhW^b37cL zL@>pJ`{O6csW|cQ?LlANZ=psI*Ks`)Mw?zM8}u9{oI#KxT=T}y9B>r7`icu@>AZDv${s93lz zcjp(8em2BZrZg+2+d)%C(()qOt{4A|qP)8t4To0AYYqbG@H1Eh-{5B^grswH-4F4C z2N=v$h8vV{UwkM?^`RbY2N3wAbf6GLq?3MmOAOhH&Kv-{IBoksy4Rjb#|^U&RS_99$e-AWBHz z5KAuDeXE7B%5zHXh+qA|D{h@2V=6PXoP?*Zw9N5r7}0 za+w*T2lrl=ve`#xL**L6l~lC?{$3w15~??0v4wO{Yi^Fm-drtOlH zt_0*?gjURo8GL&z^0Js!cH%J9&Fo!w_|Fid?QmHe$N zY7B))p7m-NM*E}97w^tqt4G6B(gI~$vpDz@5M-+Z5jj>lW!`nW(ooc zjq@p%t0(+ANVw5bZUe@9*LL^f%VByUxo2rQ3#5LbNYN2wkj9N43srasWftuYmFPI4 zv^t+0ky9a!b{j9+0ac>W>CHA2rEHVqZ{grVz+9 zaDp;kmg&@Va|jx{?3<-7hkK5VaZESdfOfHC(Ph&QjO>7%_WC%YBi5{lej*l(h~+L0 z@5F#BQK9s^U)f;zaCRW2BNV9846Wus8NN9<7L?>?0jiu!xO|`&K8c=$^R8#ZTML41 zow@`N>)&}LEZ_{BQDx=Nb`pTH^8V^aJSTY95}1Unwgna@7P!~h430NvQn70@!N7H% zt{{jOerHn0+!=Zaa@!t~pX-x>sg2dQ4!ut|D>fMKZ==^y`g1<{Kg=LucgIF~A_L-n z&2dI3IfDM~{)F^iKByIcMEVN5gBymQsm&TD2uA$SVw%4J3?mXHF0XjNW9u@a>_#aF zoYgTzt#|{8?sCg&u_qiby>*#;>H~lI`otz|{h%L5D3;zS6C{qMH82OzxcyGjk#Sir zX#9TS>Y_=}E*xjSD^Fx!s-U1DmAtjD1c`N$kgj^+xiKhq61 zWpRQhCk>n9lyP9n+rQo)mk%HMapPW&2Zc z9+!!LC#JN9AK#D#l9A`qL)v#Xr^GIN}<0Wyfi~!l|%28n&t}s`9 zGfM7h7ATN-^T!58LTuptjn%bKP$u%B6#4D|evVPfQ(u@spytGkg8MNT7Abt*-z*2% za=5@jzppOcuT4Ux*`RYaIChuL8~%E|`teAh3G~b7noQzzfx8l;g@)K0=mZJNvFv@| zO~>iu8-zL_U!2>mu;v9bl|8L;>7j5ah_e1}l?%QmVNd5JeZesoIjna(5JXu@72@k3 zfuImEGwsnph*D#UZTDaV7$|v7Np@ zMf;g+Ro!}QC#g_tP&EHeBM=tpY~v(EGa-J%-v@vCE(B5RJ?c|01{L4duItT_aO02N z*j`jDh}#A7l+de#_P%+0^cE7lg6+Dj2>OBf2U(uO(@+q{xD*U?k^-Nd7&<@45IBjz zUzlw4g)-&1XV=sp!ajGkwDwpe>^Syl#p4Kr&r*u{wKF`Z!L?j_J*@&}{)O}85LfBG9%3A|OI7#V&40*J~DdD;D*fr0fwiF;fuAl#_l6j+7B 
zVhA4NC9N5Vl&GxSy($may%-x-9Sfnx+`11}GYssOI9|`3+rfM31zXbKPsPXwG9w_oX!dHU9tC_RD*w(*Ee|6v);g<>BhdRUBHa3F3OJr_)}|~)Kw;1E ze#T%m7%hzap|C+hbKfwUXU2j5uWlPt;Y^scNXesg@CLEy#t*5dnc#i>*M?C*D&)(n zsdC6-!GMui%}cQ)c+cR~nM@u7zehY5o2;`ypJ=fBlc^V+hOd5dexC-IxUS$JTMtKj zwG-r4$1u%Gt(=D(2mF+3&!Y+ScxIfe4whnI!C{L%!zc*CaONDoJ2ZeNl}!kH@G;`d zkz{Z&wFKD;I>bx@0Qfw6M$xCen!P7K=O^f5iiJUer>82wje5U$7p=0k*Sx zsDwf^Sh&SfO2vf&Lng_r6t)V)o9F4xw?x648_C$Xk=XDcqC>v8HWGG6;z^7}eZe~} z#&K5H0c6^1?Wi@-amz@A!@gt?JPW=_R414T*fPm6VQBsLUT1QJ+fqPJ1mVa0qq5UUcuj=z%#R zpw{~g389LfuS6B2z;4K$W7;thZq_q7FfEY6t>;ATGQ)!im3R29h1{{wM>J(V*oTA% ztx{4GNzp*L^$RC^E)U4V*4}O@iUOywPyghi7X<3_d`ei5#4Khv z0)J{w3Y#M0)l|d?~y17*VCk-r|SFMu3c>N?&fIlAOM{tB_7H@;kd-+T; z;dnSuSQOAf$8kMT1S_1aPoN(aOGWuK1o+*~vu022V70veo#^LG_*-d27VQ-b1BpSs zNO4^le}A(-kV_I`W!qaD3Zj5DDQS_d5}l9zF`&F-r2_=nj>EN|ouF-)$|UvE2%;E! z!9c=2AGDV?6}aoTAW1=%IUhd)Y$R29iBQ@Qvy?BJwVnqxVeMKn25FGqiW}#^ZwnkW zF30>XT@bhY?Ds=&1bkcirHJ(?9v%_dw4|m4z@F5_FJ`|8h;So&$y?wB%FWE1(-cAQ z^&&?3@#;GRlRz*n)t_@Xzgx z;#@!l3hpHodV$A;-2LOrRKPNQDe<3t!k#AeSJRp=h}r?a*Kfa+fq@8*gkUW>eDx>b z*1RnSIsV9rEpb;k9>1@Q`8FM7s~%H*LHh&0=d&S^C+IvI38pHMIeH!J`S1sPaRqg` zu33F_d@h(iUH3KBeb74iz}3^s1K7ZM1K-UUj6?(5KwA*}W~z&9nVf-bX8-NQu0QmM z)i;;W0^Iqe&GM_^F5Gy3?ar6B2*{d2l0M!nP^%1ucui%Y(9>fFIq>cADe?5qa9@hZR=j_Ru*#%M?pH2aM~W(4eU z_NQ(;4)Cb?%6wufI*+7meAlpR1;Oy~@k>d{K}1K%#<|fV3Ss*yFUgwD1gaD+xAZPd zAhh+Nj-QnWbdn5k(lR{&vPs8vx5C&ovgfjeo&~^w^Hrx38bBgfAku(f$_&syW3av{DmD0wor3}*Zk6i zmSq^=43D{$zCa<=gH%M0Zrej58*wf|+!h{AQ?Au#%0M{j`7fqBHV`*S@!)5#8Ejsm zb?_*l1_E`w@dNTfgt8xo!+5h5IKJf1{b5N4HbHs?JqLksm=JK?>2MrTi{X_EW1g_H zW+u{(=4oV@*euoAcVJgPrt9-}G;jMA$rIUZ2lhJMRXM*)L5RtYsS(o|q&uF(vSQl8 z-P(@ZDd=$r7o|p)e2IdWa@Y~qI_=<)O82{So*}3e9xP{|*X=z4MoMOWeF%CRJeON+ z2*y766tX)6VE%#g=H5#mC|hP&(?vLd*34)}PX{ab4l16jW%l$;>Zucr^| zDGHIahu7ElHwH-V{uw{O*Mx(oE0G2Ga$xM1KYHs!AS4V2tuK5+$BQ@oHC+8kVUT!E z{iUffI)3}^;CmOWq)9x5Mm*tAj?5xyIu@Ea{a29yA}cjW(Uhf=eUz+IpfiSTJjTDXwA%%aXQ0vfB)RpBddEan%)^ z{6*M0jjRFHF0iH1-wF@wzvvLFpz-gk?8kvbqOj=;9AiJm5R+P_1PN2?h~7eP%Zzqw zVAy8)VDrNT`d=&Vh-^E8mm`g1Iy#gAJP23nzS| z9AT&^QRKa)J)Gpf)WVH%fK%Crnm@6-K#}=wkQ z%E7k;hyET9O_nUU|GZ+23T?Ns6t?BFrNdx5N+Vb{CIp(KeewEBP>8CvZ_P2-^sxSv zfAh_A6k_pJ0*B~!DiFrZ&x+txu1CWUEzO1D=LtR6GxNj&=C zFFSTcJSzljHE$HLMM**wfw8S^H95SNGv*GeG6rSr$A4CSpb)iR%>@`dBq6zvY^}$` z5N1>6It0;i3IptG3)&|h@XqzhZwfyrSY>FxS~zJ2A9!MNqb58-rmyI>$6qtR{(E&f zn^qrmdlW0SQ2wx`Ji6>;tOux$XXZDHgF%*P@(SDJAYwmBbx@Qx6sRxh&3IG8q1WUf za~x|3ali3aWeIx!9olvv@?FE>w~oI7i}!bgZAeXr=2cTTjbk?PXRU->uU6JFPqbZL zdfU98U<_$pKB4Q8O5h&8z$MFT23OSz76V%Hf#V+4dTxj(tV?hNY;9V>o(2UE_PH)p zB}a&MsfNJoe#5!X^2-QA97#U=bO(t2WPxJWQ3cObMUJtFY}k^2-penn1H*+mUkj$M zL9jnwL8hb#lx%L25)s*e?MtrP#3kO~p6Eu+cGnD&X&T6v>;ysN6TA9T@RxSgCr*4!*E4XxY82s4@F)k4c zR2k<+y8lEWsM03sbxu71mrmKoV#y6kM9e7wZp6bYYUOR}5HxIslR|S~D3kP<00Y17G zJixcQ0^2j9Nf-JdGB-&Y3OpCN+T5Vx3<)(H*eK zf~h(4k-%82_=%Lp3Qoit$v5x2fOok3n?glR$ZqNG%RI9M1FO9tFIxu~8!(x`BN;^O zVsAgjd?OA=VouGs(Rt24i<_&aNA{3ZmHxtf1fb)7toTEih%`cM*Gn z3G#LN%Ac|0pfibFrQsnG+ITFZ60YMyX!}pe@3kldVSI|%v7I`k<&&mpy~P18i7pfs zohN)=z)-M9=L>PD80p!J1fYL9>sXeI3ut5=yF&iRf$oICu`h=)Iv%@5Z;vYk-v!r1 zjC9fKi1g@)KaU1BTGe*V)b&93Jog<3=OE(m6#1Z!C<(;gI6N&YaR3TEGir8EdI(@; zg`{!OY*5R#WX`Y{33}&IazBuY;QZXyHK0@!mT8W7WbkR=*Pq~qSY|Y? 
zLVtYZTF3^)r<4c3>0QB$#O)3DCpjSd_5At6LN~bOdn}t|BnMM<@9*~qYk>{v&!J(z zP^eeL^j5sFhDdBEa3;100JhFvQ7wrekUM=o9z3rDXL_=8J@W`)k-$zDvK9p6Dvf8N zI@WOYGo8?KrZGfe;(N2b^o08HO8a=RI51F??UeZz0c$Gr?+1IWfOXN+K;#xWKE0El zeX(f(1P10kPdgOhy-(P*?h!KxoG4DzxL-Vzko*0d6>3-P0Nokn?D8Obu~{ zn%CFG>kG_com1adr5qTVIWi|T0r%LOYYobdkVc_6b0395q^YRUx9A(dtkf|!-aS6>ALi4zkM^&kVe*V8 z=<|E)=SG`Ufeygit93m5NFKJX_*ok(iNFMgkJQN2ePGP+cx$$VKDWmzJAbBZ2$*Mz z@^Q025iP=4^#k8rpiML*<^|gSviU_WmjB@Z*?0>3x-%JQR>Pk0km7>ASKj^2TA^Sg z_Hnf`zz>RSYjw_TW)M7I3>O(Vw1J;|b4H@v41B4Ci@G+^{(-=m$(@%8GTLtKQ?j_i zXV`h^8~PEUJAK9$4z!6d->t%zg{-j`Pg7MDp$R{+!VOm#OiesI zJN8hB{=8rJ*3AIE^U$&RQU$~PO`%2d-?w0|6^lq>PaRt6ZO4^g%L9&*L}wSLBb>+D zo$G%)LP!ncrDd{Nz-OL6J$#a4z<*yo?ZG(~Q1aKm%0}n$3q*u$W~;=2fv-FG?FSsl z#?y&ntjt-sQl}V>WnEs&ve_>Q9ALR1L#={D+^Qwz>?`sMX;_X@ZybS_vtgihE!zc^5 zZ-{s$7x^qlQlS64ce5cv9HyPUGjgB#KsdorJobbvd>J=7>o?+pQfaA=y6Fl~%0QKv zh|bIEk`#!cVoRa)nl;ZX+CL24wO3_rK_OyiE8-V~Ptfu?pR;Z<`yC@5CU5s{EgT|RM=TrKFwlNv8%xb_AqeV9 z6$9WmB_NpTdj9;Ofw1oNWJ+9j2vrdq2n!;F&GeKt%$uJP88xUc9GRyGN~3BuwqGd3 zZE|{^PBlFUaJ_@A@1YB-+I_#W^J2hXgHMM1iZ>j;$)UEcLLr!h7j@@H6~QiE$&BxV z6J#e%CEVgEg^q``%Uhq&=WF7Q7ZQ`u=UzLDKhy9#gCX{pk`L(5-T2wM$qw=az@^7# z{q=_gK-^^!)Z8(LSB&YzRmz@lF=I2#6(s{fzCSJLT2Y9b$G9)>lN~@Q&ZM}Q)(GrB z3bL*+djOB6Q%$~;7p!hHPDQdNz@Y38W}I6ai0kk0yeQ%v;IX-(U#XQF9H!tzu z0XdGU=YkSM$-KE2tR@F2(RTsQPSA11FBIhzQikBb+=Q$<&<9Vqw0qL|Lx_;lwP)eF z&d}lO6!&t@1WGO4ej2t$!*7ERiu|ceaPv1SiRJO`Y4(R<(h8VXp)K&t&<5Pa z-Pc+Qwh-O)z>ms=7h0q=ulmU=14hj75TeW+mS?)n49A$D+j;TJi?MLHA->B|Qj7#r zpEg$B4My0FkhC7_Fb1Sb0cksx132&o&7AVEfD3m0m7P(5i?qyQ{qGOq&W$;8mY)pp zdwZv+JzE02-Z9>cdu;@Qgued2St4NOW6P_%*76{c`Y|i90fh+JYglx1wFU3bD$k;D zq`^GqsbMO%Ah^xntKLfxf%79L2cf_(2r_<|#^F64xNXsky8mh_#EW zwM_={scmjX4G97E$ORoBOhHK8k+`_a27jXil#M(@gln?v*CDQSt{6`SP#YeN_PR`wp#4=yO5#28z{(HzMF}=Es{& z$U87v>0N0$h(av$JRNkTmjgTdiQDwAG+>#pV@ase2B`GAxkOV$LB&X6k_w$)cNXI@ z4URSjZ`aRKL7$m`Alg~HrP~*nZcaUMDK&(ttN=5k52TQ$XixFiP!A3pXDJXS-e5!K z(_wI84t@BBu5lbjP@!&aV023fl5c3yhZNYstnIvEkC`3RP=DKDIi`T%vs>BhL&DI` zN4FbFX^h6-g8Pf5XdYRNA7vje4Z_u#dcx@ME2Q&BKJpx$B2EQ!#!lWpgoO8C9ED@P&zJQQr4Gd6-LX?BH1 zd<$?UR+?O@`GnZ- zZ#Xy#Q-%#($_ch=Zdm*(dHR<`3euuTAx_Uxj>AuU%AN2WvpDIF)5hV0L>qyEan8FJYzl{{0 zw$p(IDQnigsvoTNvs3h+M1n~#;RYd=7JQsle3o1q4J5ls9t5?*(3nDe;~IA)V97oH z;Q2)#x(xXk>X|S=(V6}R~%J2OCdR`Ik=ef_h&*yxu>w2F)$|2gRVsn)b6yxrY(r14U;gw_+ z>)X{L(B0bhbh_OT%wF+cKLo@P&Te}yd8v;gk@rbLVQVvfB?WPOY$5mMb|l_E4#C&p zHru_+dN`HmELXB$0d38b7gIFo5On&Kh*u=>%`sj(9}#bYC8>n1M^9^^cIZayOpQ2> z*P6A;9+QRA7?a7BQ!J!jW>~7=a>Ktg_uplPirA20Ep~UP0N2)Tik@Zmf#Mi*aYH^U z#P;pt{9!MSpB5!sb&1ZsG?!^GUFnRmr+RZrJw)Fa?GG#XBn4xgjWhfW%s8v*&FN9A z0ok3qx?DH=K`xQ!+*2Mu42QFy5oKgRHpg8)Wuo&2W!+#Bd}@ngT?H-r!-k;S)lgKu zcP~U3Rc#9cG~jgcPCwflO&HxYW{PvtN66QhZ4YdmVE0rkuQ^Z>Gp??zJ@K{(<=ehB zWw$oulQ#an)Xa)Ucl`d9Z99(fRj#qobt?Eg*CwLOQE+iLewriu{n~(a*Y{5&F+E$Q zPBC}I?|v8l%~@m}IyxJEV8sl>@@s=_JEFl|xWqK{)Cw>Da;X=S=af}QTHOO*9=y7K zm(wFA0RB%-$<->SpuMrK>F67M(9PJnm!I>(j_Xx(hp34j8D=lEzNAgpHopIJX5OEM)$4n(|4i}AA(9h`Vhqd?9nr!ae(Jmf3T6nY+;OMy?i3|bifhYQzBRNj ze1G+3iWVg?M@KiEW7H##((}2x*2D@5WiRK_Nc33~UWT&~V7F zBt9|XQ>|QjN2Lezvr_31OX1MT;C9^mG#ScVf-#h2f0SImQRZT%gp8yiuh0I%c=@h@ zouMck-3+@-HiO@Kg{`WbvL{HGVZT`KHD4q

~as6OmO(G6V zvP-;vnF-a4f{Knr@3s~YxwVnk9Cp=P>QnTBaAc-%X}HrD-{kW(80R#w!OGO?M-L0M z=nQLl)|8>bbK$<^I}tp{(f)C6Oabe5nZaJyP0?DFq{Kn`p8jQ`?~JpaP>@qzYWPSS z5iQGO5>|K7dn9U>R&2*JArF}5_Yb(6$H36|tHwN49LgU2y%V)z z3m$LR?w{MJho=t=9fdaYVJ4ZiUQ$U4`#!}w^s^~~dAf#T@hl$J&qg09HXlW}XQdHW zwiZ;EM_7YbmnmtSFC>3QpF-Sa_x#V5Mwk|NFiG32g3ky3^C|3dgc!HI>WK9gu+;W& zUib0B>g{_r7qc_)t3;AUmi`yTE?QJh#>o?7y5lc7cN-yM)?dx9!5nlKp8L(c&p<#8 z^Zw%Scq~j^UFY~_4h84;KW}!B{VcDbedAd}M1BjI*_B9-Fiw}w>+)!+?a|Dcou6A&yg_4c7o%-;0P8>QK9}1qg>FHHEejb& z_#s49;eVG6>2hDHzxmriOJTdk;Z;jqY1Z5K$&bvN>d7)~vhMVUd=h%u-bdLNWp-_U z)Bo!U8lT{33rM|rL?i8W1{43VMW%%h@AvWCk>-{}vlZ9en7IPV)Jf9b*&W^Y3_C=h z`#^vx&5)U_+n=+BX-<%RTMg65s8>!n(Vs^0F3|s`Jbry;Jxk3UwO<$;49I?Sh5GBa zm{1*D+b*<`Dc6Yf)7(Z{qqsp|YV`96qdoD-R0i|k+KcptxH)Z8qTYfEfv#OIL?~l%3V8#*Tg^gvHyqohgvcZ z$3{L~|t5B?R!371}@Xi0mh>)og0 zCH=AcCr`&e*&>9a`FE9rzHU;?uWcFl&?Uz7)j3}rH<{4leky8FrP0m|ov6oyu?!rlDRv$8q|2=Rt8UF=~1 z>u=Wl>)$?8KByi?-3lWvZ~RKL>6j%x?otY;h|uC`kCb!egaC3vUw@AbU8fit?|$St z_>=PHwT?u`#~?gZw+a9HgXkp--<=0*$$oyKrfb`0CFn0#_iS4BgWnVBX}J^&cyn)m zx?ZM&fM+?C!K7|vFz25LC;Ik|_Uq>la2lYXqg(!nrXbYhOC7t2uItfxD&SM50wT@T z&%QrS^2D4I37X^kkzp=kJC$yM_j@M9byDddIBm-x7~+qF`&o~_^m&u{cR-!JM-%^T zco(r-e3`N(hPHdFtPKp3#>W~gMbT5A$zNJzjLlXXqeKj~v3bD#WA2O-S}QDOLciG~ z`{xNhrWOa(dT6IG4Km}R^6~j=Z%mPMDl+(awyzPTVEeP7L6S&=hUM#^l>vxeIazeEHqkvtairGAo+db&ea1I;H1#am|4~0<;O$( z3TuHlCi5olbxRgX(+tnF`DNmLWAD`gl8gCv<`@0n?KTM6aAA-3`FLEKF?!oIN_>U_ zhb-NE6+q#wEjjMz45vN*T}$)AP!oPMohIxAhu5{6?kwnGX{c_)PcBES^LO2%mH;07 zVH~In_QbI#*ILvSQ(^0{eSbTtPe;8s@ty1JqkOm@bHC(99`2@J(K8h!ITv~PTb+N6 z5$oii-S^EJ!o>?7E%)|fc|yRV`b{xJEh8+QC$HcV$7E}an;ujx1{fYXX27AV`ont( zH*8$weDjz1odeg^j$D3Xj1r@teRM|QkQ>j-DcVNro}y~kJzKp|sw2przFQwtDf{Y7 zs(i3&#O$T+VvA^oTed={eDF3_{u-S+BV@EXnPihh;d=Q=yAb*OH|=lIS+rWy|L%mK;+K{!BoCQeoG}p*qKfY9jNfnXZW|5p7+o5v;Q@hL|OY8IE!_^vGjrsZg&Xzok54(8s_L2JV+|&!>N)_D5 z^*!JrVu0R5QRkK}rQ+40)!ETzqCe6~-ZL7C#DBftm^f(!@TlSMz}NXf3^U8gZ@NKr z|AYh=9~~iFrG7ov{aFqh1csYf3zLwaArnO*xnP^|rsA!O7KpxW@73TJft*JLJZ5S> zsA3jMl>0_nRuub`j)zl}W1EZBI`7yZqh0O!mjQM3Q4gM9RrY~V!L=Q^q<^2_`NnG> zy*^~xeo92V$VRm0p7!_IWc`sbvtXswCVn<|*K70Vaqgbu_2d5>aQbWD`S5;sY;1jC zBNso0TfgMy&)bDyJ=p4WTekyB+`=PfD|Mk3&iy4#Rvf=7r~Nua&0tpl`^hQdYu%82 z?V(DhB8sfGxcsv-gt+t!<4r?BaPE7;SboS7)E8y)(x$AzefM^%p9eQM<6^fwd1MZX z9Ob}IV^h>;@XkJY5CTIbmzlUtS}<%<;J8F%2hK{`Z+y`K2w-42p*OT2Vz+gEicf^% z=8Esz`}uLGb2d)ZWXZ*dmbCoyHf7v6C32OkMGuW^#*OSG-&vA&_lZ-LCT401Y+Muq zu)8~1GiPfY9v)tJR<;ldlTo+yH9c!kg0lTB%+G?UNUgt1(+bziADk(vC0q%n_wMVl z!8l{_`|hc10sN9W88md<75Br2h1aDlQRyRP)AGO?N9Gt+$JuTpq*wNP&QW7rwsO73 z$ghC9TF;I@PYyt{bGD@QzX;fj#f0`!;xV_&td1pMnbLWs@0Q`TCT>wwPW4q=Lv$qV z!(~f0h&*1EQ*{W$a#^yr9vSC{9z{@vlJOZNFrks`ZwZ-@?+O<_L?JKgYT|2KAZ+>m zt4QJlD}QTi!^qNt(5efc8&(UjTK=Nx3Y`lSf8P1NxJ4AQ{2D4*(+YrAvEfn6CJ0LH zt!1j}qbRESedN91L-dY^nqAl2Q1obJTN~LIIyG07!jW{co)yc$s5F0avqU^(hf;xj#zp?r@Wu^x7tM62bb(ZClUN+mPA%%1gh?0`6KmOpGL_ z%6RP?TiOYA@I2b_kM{*HE}YrK&XQ?{Cv=)cmpD^!OnlheBJ>8S+g#0-jBH_AT+8Kr z%^sRz%sVSuT`|(;&SzmNg@Jji52ERk*ekfva~JW0rqvng>Z@A7Yy6V2BMTqQ{Em#@ z{^f{U5Baqp9CpRhy`*))P(!Sj=I+j8vBQlo%ffq(_fuLXQ}zi7N5Zpa%K#gv3wEsc z?ffj7fe78Bp?nGU=xR7Mar{dPhPkPkV2i%RW`~+Vzf|VciY#@DuupqYh#gjG2e|s-kF3}YMfj`RA2ZAv1a=4PMNe4ei zWK5&n!tk=tKkH9>p0QE?ft5HkB9UYc`NtP^WZK z!`ka)ZjpL3BqfQ6?R^)7UB&JL@m~#Kqv>BV@wy0&%hfy`mq>nahsOuXpg)c-U}QKW z4!5z#&7Ug|heCb3&o?CD`fQjKUAHgH)V!t&r}n{1_`C25ogu6@MO~E&uRt3?Hwer} zW2@7jxIxxVP%?Hf<~|O^u@j%lOz!$2)04`sf58eo8?`IgJ)=rfj*at$6fzq<>ubz6Q% z{O5!zzm5!!R0q7VEG1AQU6sD=27D<6czu=KF2Fo60Pq?uRq zGcOW;%zL3NrwYh?d!A8v?{x^)98RU3xa@}~y%O~|wTz(i@S+u$4h2&RzvJJwePomn%Z?f%~CE%ZVJM z^Yxoty~ZkB(wA%&n*o8|lR9k5y$Q|FqLh=N0Be#gE)g7$mOvTwf{65Z5iYwI3Y 
zj5*w0*3c(;cB9e`yZzZ{(6QUbUKa&nub^jZ?B;meB|QGM${SOE{bUmI^>O{my)M0? z6qqYNo`3th7&a#2Y%i~S<1Bx`%Iwc*{CiX^rpqD^&pkoBck*rUU+ICRFee)vX`D1w z_H%@aM($o27G0DI?5;Up;s{>VR6^n;`IPpeTc3zOcVbM%da^+s?9?Cgn@DcHv<&t^ zXZ11ss%hQqE6EcwnJDzDCg1`L_T_{4d~WCUOaIn{V^VDGiA8tBolk0Pr*g+;(=ugV zO??>U4(Yv@w1uI}p$TP8AGE#e_AZf-#h%~qY>cG}F|cHKf39&WmOd=*o2w*!1=f_` z7P_Qcwte!nup?>m3_Z^i=mMJL{61^YmiS$KQmY$(_- zErwBV$%I2M>*L@HEV$Owu{}UO4OH=`K6&dB&PDk4*P7)dx9~QWA$@WOJlYHaB&cDXsHsc6vc&w2G#GNgwlRfBpPO|M={2?iHC3B}k;PguOf>_5Zv!-86?S z?5loBr^{R5>$VYYp{+JhO8;^`BFzH(SZ3qtiv7_gcZ^ZbN*2*aM&zg(ei5ZA$S>-~_x^-|^M}l{OeEm`wTl6RP8<1vns1_mr?g7K*=EHi3lbY#@ z7WK#VBQ!rYvJjnhyXuYIi)9GtEeX=PVF|8}ueJV>@#Eb+BW%{Kp zO_}n_9p|T}S$v8#Q56_}Z*2P#WkiJ8-|4RpelR>Z@QUp+QVW;E4sxhKhd#*ij`rEdxfiJm#$^WrTx?(=l=yxljV*!~<@|;&ZH0N*LN(nP z8{}^BaBV#71VOjVpkC?#+A(OXgOzDW~-8b_a+fr9L~ESWoC8d z+I=UTVy2-bqkBbb1+vMzgeMeC}s(tUNMe@`MFkgDOeLNE2Q1g72)z_-l!f0Y;WCi}?=eVrX#CDSspcAM;n1CR@{C8p671w9pJYWIoGLoe0K_mHwT@13rkc zswk=wkVTGO50g#Td5n!{mw#-m#p;XB6T$qXzgtr%s(V}kNzGdO!cy!(?Mkz#u_X#m zZ#$jfpd?^=LHX?l;Nm!wWiJ8a*|#j$2O4js(r^CpmBCV@`W7V)DoI?UrOU z-1Ca`)R8@ahNREQ9mFs2yesMDdE)!>Q2n=Y#3>uepYB)we}6NPmMQ(^SQ^@%l}R_w z#Dm81CSTS?Jq+4r79?FZ2ans>&5vZi?w$I`Yv*2W9KCw+^WwH^=%Jn2&esx(pLOzF zcg%f=zms+KnSC+#)#S<<51qs09nEQ`;VfvKyjZeC{?6j)L&b+#S?KTI%=(VI94B>L zLxOb3g1;_R`tMjCC=6mP+{UqZ^(J#I|AGpnyN~QXZK@BC=*WXTyJRtT%y=|xhYpIJ zPX6Z2@kejElEN&bGQ=GYWg3Mk!YZIY)@QRbd>&FvTsaJ(HER}PSEvs{gnarY$r15P zP7k`q?I3o|{^b^rpOo@HpM`I_QINcV^WQYTA_CyC(!oFY~XTT#HAV?6Kg~Yhid7t#`X~#ReOX`nc|O3qg7Pokz!# zeGp_?Cb_8Q2-AfTgQ{^`yc3yt(R|Jt0WUr)3R^1Ry@7OYC-G~aNq?tmH!2Os=exvb z9+7;kSbywolBenlEOh6sAuB4Yu29c$XE^rIy?78tcn+&Yo>`=xce*~YAXZ?7MyZ*O z9odiZT4c_@%BvVYO0*)Lyixd5cKh3D4g-`0R|Ti%4p44Nu;;SZ#Nf|oD)&&L=Li1_ zGGNwE#Nv@sw+>Tl)V*!-@e($Kl$t}=dE%$Iv48f4PD~|yYPn@*a`Uk14>LRGS#5}_ zE>kC1XX1r;io$S`H$)P$t?BdA@jhHZ&@s^$R_?01>q+kF(ZY(t@g`3E-fK9)-scV) zcK^7P5d-`hKKDR6Fc1lQ)9O`d-3WJ#)xq+uJXn~!Pub8}fZj&ar1QHH#y(|D#*uxN zgR9(h)u0hhgj_VTtWSsW;U=la%Wg2u9(+8^UH~Zv(c#%G5AodS*9ocX5eS_LlxiU3 zbf(BD!TV4)$-P-IOwh-{LFM{WZQDCoqE-oydu$AiwTInGa|h8bW|*L{ts3c@a(761 zm|(3UKV`D%HYVAE#kf-{@xkg(wCPT3G-OR_eyJ8lq(Ge6<{BSt*)eI`xn6^DMlR|W zhwWfrKbXVOQH4DxHeAo-Cp@(J=Xzg1)Q=&j?lpz@CjG= zt#dhnC1R;@YRm%3$JaL*Y!HOyzV_{x+zfG0!&hz5(B zHF6u!htk!Gbgg$*pbo8+Sp6vt{{2(k8#y!(^YFLvUQ(BIvd>1F+}DTPg~zF8#9zeq z_Mp8O(V<%NbLxgko$jOXW_4gx2Qx3f?=?C~bbi`1hBu!2VE^nN9*;C@_`J@R?>g#( zmU6z;8K+zXS#Nl?U!xqIXUn#B3Fsj8?Axu*tA!}A`RsZ>&H+0IhuZj!`SGfEb2ay! zD(vv1Fb*o}VlJpYb|LRP>EoX%*|zF|4JHpmgWp>t^K5=Fwew%f&c_wGd&qM;!DWl$ z{m<>_m2AzZ^7e$duJwR@%?;2-w=}Xd8{%@iagIZoACw$Z1YRz-B7D1XOZ}QGJU3kz zKQ2~^?YgolToVjXPF5GrC*ySY_Xc^1f6?ISs}(Qxt4CVo-m?)`AO8uY&_Mb#1LI`^EiJs(GxfxRm2g!&25=hh4dBW zas#z0w_2e>@k9I{wMaO&HCi9hNgUN~ zSLe%P!g@aVC^2%{r=tYABD^Jmdm}LCQR;FrA_^ZKp4ayo2BK|Il6`n`;%O+}U5<7g==OxXev}wZ?*_ZkUVsmZi6+X3=Z>4 z+_u4YF`mR?2}itQ6f(E4)kVdt^NQbI0nJ^HHZ9jA;PB$Sk0YBpHjl(+9Y5oTnAx+` z4Gu2Y+*TMl9-xoFoRw&$_s(Ewt@*@ol@|iO&k72}Oy*1XM&Psks4_Em`nFyx|h;D9JI$wAO6KfHyDq0qZA z8QneR1ZUParF_11csHT&?%SX@C`!yF_K)Q7<)38CYf>-JL<)(hRVRXp<>PynbAtaUGn7iBM?rn8{?|M%;ImRJqr zdeeM+|mC?zNh+u|3)U$JS2qtnclY8fd=#U@%@BXX8+%S zp7ov-l&f>F>2PR+F)X3r@g?FzxM6%Sn&hTzPce*FT@Huq=6Zq0pX#9z{_pBV$541a zf6Hi8s6}$MrIDl64kw-X$98_zdbIl$y9)44yV#o$Q0<)+08d+el_zFr|HhL29H+wT*FPwgs<^9At3oWHbhoR7!f^%Q~zzq8TuI5ux>~>otX)N@|!;jf7-)2K)j!Iz5w~0 z^&to9z0mjIn&Hp?Ki9KQF*?kLz$9gxNSTESdWNDdy}o3N0h&pv@@JYb=HcIF>0<+W z?Jo@@MRxdc&01gI!wKIL&URFb*ka0wLS041$Ipf@g3?lY*rcHJN`m?<`t>MVu87@3 zW|GQ=D@K{PdEkr8h7lEnbF~dvl*uFe>lU^TM1QQ(x9bc|V#f5zL*o^!X)wxjoZnb? 
z22=xb44k7e*roN-O7oNr(pE#bzL0!|<|i`>yM0*+@|jPrv-1F~(~6P5rjXBWO>8=KndXTMZ}X25b8$tRLbx zKOnrHp4H`yQZmoOT0gQh1o|RC`uJ$Kg$FLUcqN=j^@GWi?31&XJRo}7n&E^_0_MK$ z^#9I6d_xu*64xC4kSaM6ylJZpdPe&-qqRJ+Ir~+oq+cl%|6L2B84Ltt@>^*)xuduh zm?2H|!xt8Ndm3tJ+@MYGe`bLA4E%l0%j6b@(h;vYr%R7mqKygOWA2r2_hR;4W=ka4*Fbf;_tJUjjGf8D8# zqie;Zs~bOr#>E)kz;{Q75Q-fz;0z*KS`<=U2tw4?a;M-}S)v%QZNi zNf<-vPw0GPbRin7FyF9die9S33Y!ZBm=~y@T|amL%t{Pio#&i!xs6Hnyix&PZ4ID` zCcaPUs}o=6$v!KYc{dAnARB7*lBjq!bFg$&ag9g(0&M4Z=VegYA?>DXs`rgLoZOcE zWJJ*qlC|4}??=Ys>_oc0r}Po1i>}G0?K%(ju?@a~&P2C+)iiyFa0fZXKIP`U^M&U( zIn|_-fhefiq>$F=imxHHbJIR#-t0Q0eQ(JHj>7>f)&VN`zJ>nHX<-M5$&R#I8YbaL z>5J4mcS!zDv{9bA+W>MC;iemFQXpYlz}(!Fi+`NLcfa0}BX!&A$KC)#2<%q>J?oM{ zI2Ylg9d|);Gg32A6Xf%#&m&Ra0Gth&YUpop!5O;hQ~J+H-b{aGT{&M3mYSc+Zl@RF zP2yjr!(OBwGHSX;H?SSowy>)tYPh3$B~Iw(FH_tLTJjk5IsunpSIO*8M)*3_{<)+t z82|aT#4xwsK*qlC15;{#Sm6A8&X)KCTb4`q{o!`SkFS?CjuttS_5V1{g;l`sjPA*u zS$de0{Gr-kHL;3vdsfjLl)Ghq5smlWioyEq~8#7_fI$3<_qbqDPGAMr-f;i*iEL#a5Z;)y`+Yb|5`Nk&OJ?4%- zUk}Zx)cPWk`_{&u%d(Js%XnzgKOZlAQcZ7nW}&W=_j9R(Gz6SF(*k#fAY;>yd2Zqh z*r0!{PyV4F;tX2@>JE}Tp7QjRkN`iXMW4m*;&DZt!S8XEp=!{!iJcrKpHt`ew$q!v z8jzJ1+X@B?(CS>jes}#p%w@F>>Jt9T^iE}kng57C-zQh;8Eq1D_?ve9BfOVyeU_#! z>z-gxj1B+J=#J?=Wsmu6TZlg?dUI-(_`dJCn;175;o#4t%w*u z6?3eF6P~Dc?9f^z-a_-d5g~)Ea9nm-I7GOWR+s+!mStp!qUZb{Sk!Icf9uC$-KBb* zjhTK+cg_dXR+WO!C@T2bof+9<8;^4u)tJ?t>p#XaG_M|ApCo)_ z83jyk6|m7_+k@k&eV;fSx8TYC(KRE&xxU}}aGgc;BJMKZIrQgX30eag%nv>&zy|H( zVh8fiL8CD;v?8DwpG2R1b*V0e`;C(0XTD}C6k65sNB4B=Qk#P!o@x9lCvid6aBfJF#>KledM#%GTB$$ z%})Ft{dxmF-{*?r80JJj*8_nKw4~n_>q57Q@Y#fhJ}F8yTi~EPSIjLVJHl~s8ofu} zcNn*`?spGQA$aM!YOaHW|%2aM<9b|HqZ`I5p6CZSCBv zA%->u%m3=F?}6{hqvI-v$oSPM(k~^~;gia8gNFE;0xC?(Hj)0lEKT|8&X7ph_c{HT z-eZlD-Ku=w{yO6oi;6MJ0O6t~u{m@fkA>*8Jd-n5Dg+H9{nQ(cL2uipb%FdohT4w% zH&%79BlqSuf$fFx+h^x_<8%S(pFgYjGS>!AfZv~SYePJa*h{5Hbm+{T+Z5}3b)a5G z@s|-VMYjNV)#y|xHYU^X%F%2@Q4P1y1CoRJS0+>(M(X~wq!zwU4LwMcT-?FvWRB8| z^8NcV3FoD4G-A@k7ktZqJ^qF%V9+fgQwh^Ahuu!mip^zPJ3J;!Ht+Pj)-ik+4$};76(knVU!VlFR)*Z1O z(FeOmw1-}N5YdAJ4m&)gMOXWZ#m7cj6t-#arpgFJ+{Pn;lS@We#tt8fXtWfyfg6HVZoJ4n+yyWt_2g0Vu`J2i7?0vcI0R1x$EDrQ* zafAm!w_m%vl*STtS60qVS?fXfBkP78-|ZoC{4}?Of)%o#S;t?rslfSvic(B*35cbl z;bYQwz?vuZYyzJ$hK*b%+B|pR#GUbGU$3T(KB-|`!8V0k8(a_@f=pX zi(J0uk^XpeSI|kqT@zKf7_s@F4ldu!)6C;0yu5+}qaBn&h=giX{RlRIL!X;S)Pe@4 z9vtJdCi%R-BFkeNHxSPAygF^~rW{0ZS$wb{eGbFt`kz}O4&(86)m_I9S|e?l-Q&Z! z5ttkDdN-&fVKvLZ5ecO@u*%Q=XQdE1QAwd{gZB8FXFqOf=a0~GE;k_t6KFMLU#TSb zJKpJ4Z7X?IJW8<(i@!&&zyJW;nv zPI6c0!~fnofSeL=?1HNqCWC~nu8y1F`HjRAmkuRjW$!QQpeucp$LtG@R`rcA^%hpV zDj9}m2?GX=otpT2aqA}Gd(N2sqn4_==zz^HuX`15mBP#AsE(M}i_k7MWWByvh}Ew& zr;hHogX*Cl_b=KAL7v&$=QyhlsWyLzXG*Kc01KbKi)Z^fy}Q@|Mu*o zg$UdNB}WZHZQ$m4cKeRPHeBpzD4w}V>K$!f-rqgK$RF6W^*PaHHuITu?=K{Nl}A?3 zs?h+t-Ya7LxNiW?LeO?rOgeO1@b!*z+*E;%`|nLbPPg$MF@Q?fd3a;@o}^ ze=mNTH$9}-DgJt%Jt9?AibFOM6;480=!y-T^rX|qP5Q%8 z4e~j7eaPgSf~F1J-xyCirQF2b&MmJTzPTdeMkeKnQUDnGjuk%Pi$U>ynbg^LUO2Vu zti(Qw7QW9lHTGWDf|F$P&BcFJ;Lk`-67D*O!`6-LkDOj(QJFR}PQVsTRi!oj#0Td% z^@sn^cr3iWNeewYYY8<;b=8Ci+p#|XvX&**7)di*FD5h;A%206uRiX? 
zQxE%|=fpR1F~i8bjc{A5$BW(h+AL6;R@0kb7KnR$`(VwN3oDz$@7c=iKs`Cpmlk4( z`Kc=_L1C#d4ZCxzvnm$sZ@TY>2r1*e__O2-4JDB9DjJ+lQ-);DkU-v$HKyX;oJh_z z0H52g*1zn;$8V)~387@YkS?jVd0dG1Nh_|ZZPs|rIW_nH=XMC8PIz1?gVrWN7yB(2 zaJEQFadfT_ak*KPV>_(y>ac=$-YXOIPurG%ae4*n^@R#XZeE1WFXppw%0TXv$dZ$^ z0py#nW=FOsKt-@IWVX@?!dzQ_yG2-`<3psmw}v@-O7kBGjuO83d?HH?>HGbe*y_HQ z)Q_j{ES=jZYl$&|x4L{(T4?IHq41MM1Ulwt#d-*LRmL&Vi#<32s>ff1v-T5z@t3Oo z|FJAmI7C-XdW~bS_PFSSv9}F|ZVxRv>#L$t^6}-nE$YZ$T6Ihwa>wD@TQZbx<-z8K zP`Ev*qj_D@TnxQk@kmqp=X#tZ25!He8e@}1-#NM_yJL(n=4$-W!s~}C6saBdDG_M@ zrTg4ujt(5+x@{hUfl!jynQASP24C=q^7CRN%*XkN*V_4lU4Nqg^c_>2P};WRK(aWt zy2(#)P3Y4r`sK-idFjf1+A)1%?tLeELJkpgw zUs?QFkZ_%t=0`udj2VOJ*f_m)sv;~(_j#!uE<%+=>vk4xCp1OG@UouK!w>5%Im(0^ z`BLzM`FMj5+QWU4BslpAZ!xb!L$MMWMt**LH`VaWMi}xR?Xdi4?2p~u0vz-|;%IT~ z9H^R8#52i$QM1_ESFonqc|0q9+3%_V;EuUBUh18?f1HFMwT zw>j&S@oJ6KvAb0_EFtmixs+iilG-(g%YFA!e9&Tlb(jQS{U6WGT*8IRZ? z)2JFQHMq>Z487e$_QU>74*zJ0UNjQ@SxE6U4%AVZ?%R}yy^89~UNM9p@452W@u31Z zd>n#g-K&d27sQ`l+O$gMO?8}%SQmM9{}6Wnn88gf z6jtB$l28Km_KPgDyi9mccD3Cp6x#w-iJISsO~`pDOPrIX ziYKsb?RV$P_Ik|EPq^P?^#eD{m&tk-a$dw<&Z@(bKt=CI{z*2d(+W6<-^{TkLr^Ots&{;ySdt~To49RXbRao!w8a&JFIH?G#0+T+W9hJiXBAAD8H zVBVNs0oML5N-|XdxO_sm7t`D@r!jCjmopYIK8@ECGsQ8*dfS0NNdxV9N5z-}^&wW{ zDK+%X9Vcot4~bfI;N`*8Bu%R#*!jtD3=`kLQ|G8Ar)fEGkIEP6d~d;3Qx!TEBA*yFK<=t_C`OWEEHX;n6Qho2BWO?cP8{&0J|v0vI_CT)nTsp7?} z7U|HT8ciw=3WAJlFB^}hJcgQYj8u?u!Y@1ce0z@ejv2vt)k z`dN)VMg|HaDTk2phl7`Gw-Dan8r%Mk{J%KErUJzl8-&+3u56~thElP&+NLtXvH$nx zeZNT**gIV1B;v(z+%e;hgltr>dm1?QGA>PQ*G?&R5;CI={_k3o?(B0{E^FFgCY?tfqb*gC~ zt;bh+nphlCn(aR~Zz?7E0?UM;uf&HZW5vQ=P==;Ij29UhZLs2UJ7vMx3fI&m6aQQ& z#l+hkLl23buA*}^Tcx%P%>8YrI43VdutY3jnZFD#g_?s~y)rO&=Cn)WsT@!_d|lg< zk%rQQzkwYC7qDFVPUPrUfA|UM?mr%4gZW*BlkZHmu<5w{6(!abXl?H1Ww;@MuUyU1 zOq4LxZDTIz_Xxutj-;41H3tO8%}k6HpTW;3bF)!&4j{)JbbCK zNlHo)F4tdQWBYR*yS)rzZ@n|Zs@Uf{yB|hlPxPA6>LJ1}shO53iT6RE?1O?*EG5t#6mB%&rX4*$oQ98MG+9%=GQ7q#rgV*4BO@eyv{#vGMM&lVLu(RYJjD z8fVwVdO}pJar$3O(te^Z8&xW_^?vm#>ZP>~4&x!xp@$Pu!yaCQl){YmOGm%{K&&9#b(Rl7tFR{*_1P_|> z`s66Weds(UviM6AR`ZK}`HE}^Il}EqBOwZ-??Lw2rTP%<)Y@6=`kiv+S!qMx7i*Ml z?i5W5%06kPCD~U;XgbkaSQlUZ%3}Rsr4JZ~=!+bWj=Wk`!pSL(8R%Y#tc{los!%P;#uHo11(2 z$0IJV@PCxIwIg*(mn2u9TrL6w)83_CvBS>8W_6#)@A+c%NzpN+0m)mOE{C?6z-WS= zncFmloJ5)Fxkmb?;y2H;NE5z;MIBS?b{ZXc)W00|t@Fi^S>?!q6kkNwOvB*50WdPE zyd>w34#|YjQB9Ek-#G@io0X_9|L^2$@_wW0wpN+MZAH&uZ1#PdY`hM5CntB<2w5+M z^8)tyw;0gc zSFESP67=h~1KU|0k#aSgZoiifR^O%$R3SoZcu-(P+>ms)g@;AtN(cKNhQx1NU z*OzQCCirrr)iy)W{+(pIxoC&m*Aj9>2$#gipW*FeBQ3a}S$}ao!5qtl?dl94$@w)N z^&Qy*LXiDxz5VRZNVLTYEbKF&Gon8T_Nq`5n;+2@M> z#Wt(Ge>7lpcS=HD&6(sZRV89~dV$^D?%k+mD5Q+d+Z`WTps~t<^-qrjW)9r01DE+!1ILakPHafE;M@=ZLwRtd81gb(i~96*k%JX}F7 z2Iv$ln#dQD17p6=2ZJxMh-2Ay~v!zV|iEVh;IAaq+a}t0#QE7qJojPRb<5 z`0Bd$F2+K_4@hg7X7a(#*|tk+0;-^h-i(gVDnrBdkyG{zz`ir@&w8A9MxoE(^NSgT zv-tKCr6kK8rd!fK4~W}>R$NE!1L13W-N~J9`Pqr2hUzWtgi|!9t}1@6L=Q~qzOFX0 z5AfPWkhk!E zPx6YN*L$^-NdNrEdFEHlD;AU0p(fJjRp9L=e{FYL6y!Uk(|S)Ep+4)) zl_YI>FpE8DRY(`Xk1riT<3eu8k;veg`9$(Vo7>i?8qZ-ySouoX!%VXODqrvm*TTwH zDiaAo9()i`IW5F+2HNwXEvE+^vH6^vz;^8@qK613Mi`i3wf-R`(3<21Z|l+{$O37U zqY@(l=1@9Osb)%aSM8(f<8vv7Fg@@)%bA=5a5iXuO;yJU2aj_)KINe?p_c0 zDMqP1{E>jJo&!EF{GEtT%qeGdeMp?u3Km)is(s)#|1+1) z*BWu#E=~2_i$(s%htG^!1i{c=y%b3L;X~p)IBN!`>(wV&upS4DqcQZ+{0;_06P+jS6hfG3HA@M|X4vV>k=)5N*tT#AayBlJMcJ{U5|-kvXi%boCNhs?{l&DaQ^=xJsi8NbuP>5ip*g4h#RaL=q^8#-=qYc`V4 zufnYCF&DoF>cnY38i_b#(^btMH^sle@&`5hKMP?j72P+zZ5RnjgIgk><#k~eT&!bs z*9M%o1x~pf2K0Q?gk6yX7fDg-u4+xCVBZ#MTUOAccJ*UTid`XJ{X5fgJP(1^GV*h zg-Uvra0$EeSKg6a;)|32qv*WDv3%b!j*Nu15RxsjcX+SMUfFwPWoEBr?=5>IdsCET z6-uQCQZCl!^L6_Hy&0Y!VfR=%i_N}e4=%1U;AlS 
z^iJxRTHJL3`-;5o>WdS;<_pu@-#+Nzk=Vub!NGIbPjAhTolWGH*=uH-+^WJjgR4qu zr=2j<mCh1#`g;fW zT=OAv$GU3R^@RPbuu1vT^YuwW|FlrBH-*a+FHKFy)XWK^dtLchA(;r8zjeRac3K}x zODs35T;n-G3EA+@WIb3n? z6y~Z#x)l?;(nr%$L7L?^u+>6{{2eQzla|FrS7I*M7Od(s%NT;v!~Tas65;AxE` zGPm2#XxJ0EM-L=iba{|sv{+;wySc@iC33=BpEt3ljUb z?{PL#FU?ihTr9#5yI9?5i1j*%?calH!HZa5cWh2DxeS#K{G7{QbwxQhv90Becubr+ zto@oU7a!FP$oZ6Mp^EVLQ-7SjP~}Xu&(fYc7`GTGH%pqUsGCl z!I_0;dh9Guv65_GLR#oLM5L*)#>K?pxw6<%eN}C2J8(TLX1T_`T+wYIBKslu>{Sb20~!b9c9O z!&*fFyL$eS5@U?;Nw7bh z<19wsVPdbAj6;!|bHP!2J=e{SqFXJ(D!xjBGEp%<4 z{{7-8kz1K8_^n)Z**z*e4Q-QgW<+XVSV}ZHw+~pGD#qbUCiYGVl}OaTNv_r1;f0Kk z9(w&Pcf>8a^qruX8e&eJqc)QhgKN&GWW9*{DLo+>`{-;GeheBpSzVlr+O7N3=Ga{j zQ@6Lm*#mIA!(!l5QWC0PS^FSGJP(7{4k}C#IoMkkyQFq0WFmiFsKZ z)el0`8Y}WIXD;HiT;Aa2uymaAnc~2(WJbjN+lIm|WHJ=NPU2TAAHh*i+|EB49)}x-t+e;uh}_`cUFQb` z2p`^3n8;p2Ct)Aak*UEEh$&J=PX)(uUQU0ie@28p2#5%sPV!Fu$T>O*d@t!grFWQe>up;;# z)OO~6_M}9-TVLd<|E>zPe;E93D6PeOh2PqA#Jo4aVfWQp`3ThCH+GxvgbQ-$3RhKA z5k7r^6ow3y5AZs0bLZ5D zv*ENm_`u?f5!Ieo@aN;xp^fY~beR9Q=)7EtD*|)_V+KXY|D=Xdlh7p}rg911NE5+n zCd$h(N0W&8cIgR|rV_MMINPvqx*l~ItbQqe4#KIjr1+lV7Ua$EHR0Y_2WuM6z+jtzdJ;0bC zpN!I!S%&b9=;MU?bVttBAjjY%iIx4aIO$`4^;T6THdRw;=p>xQ$JV|UyGAqcdGCmr z;MsflHjDFlC`$^abI+vuOJ<|Bkffp>Wjx;X?~2?<%FN5~UB^UR zlu>bEpKr$e_6{zkc0#X5SvAc}a0i~Q31^A#jX=tV%mHa_A}1{>Dzb4i3JapRowegq zasO_O-`z}ks5&$T_V;5^tCpO4$EN|=AtR5QHVAi9c!hjV&%wii?Q4UciP%he^V|L5 zbfVre*A{=`fGbzJiez5KAl1f?`~&`}m@L+&2t;15#`Iv2?p6?I^D^2-PemDY`8 z)pN(u4=)-Hnnz+O>q1+(UouiD^Sym`HXjw;pSX&O5dFHqI_1Q2ZPY!>ZS;+(>;G7I zoZyeI!6O%hr@r2+#LA@Ib=;7S|NM>n%40HdaUgv)j58n4wbNS#FodImL%;=rfI!?o z8Sr)XE8)jZen7Q0c>(LY&OaRS$;P<9dDqmR5%~=^%eLiu7cr9hdbipGVIqHq(kD6V zGEP{8IB?O2AS_>?JxcgvB*UuD#pJ5vP0h~dBI*|~vV~zGJwF={Y~On`K2?r?L)upP z3}TSu?$ptKf*0Yw#dC-{4~Z)kY0We^-Svy;`VIr#Kw++YBW53V`A%vF9y=!grF zrg|unP~$(|r9PV)Oqiflq3_Sc<8GGLJO(v5p+*U`E*ZEc`HI*}>xh03#XGL<0*s3- zx@s_=Pw<6$_+TX$|IP;*x5#eDh`PY?XzC;D80-&KI+kldfnNf72Ba+W@sQuZ53`TN{oFgK z(#f2Ho<%>p#s_nW{@f`m>$jm;d%?5%(%wbrHX|vk5xg^wh?j=E+p#!Nu@b%0V2a&L zhr<}h6EUbF5Q~oyxx|#pk6eTzQRhsU-P-*ae3HlZyMdON8wxG>SYPx*mQk*Ev(`?y z&GCTzwi(f1_j`D?=zbDrT| z{rIH@y~$-iMiKi{YD*Y(?qne9MOS;3j`$#h&zxkkk0*N1%1wEg#bKSL@1e=uIy4>A z>G{qOjgxAWU*$F~V(gRTl--{a(O_vJqVrcYa#5M9w03AA4~Nsw=G;b9-Z@ThA`y(c z-55Vl&k%k}o6#{((F-`zL#I=9D*>IC5(-_i^O5aXobdb7SZuoX#6hsrA3Ho47Styz zv7z@=#;*&7$lEz`gMpZXhWw4Wvhp|>DMmi~)&^We9Z{8y+pT`MGr4i%!JR~OnvW;_ zh!R2P)(?Ihw<-ueriSmXZ$uw*b2_<~t_UCOm4c!@x#%d^x+~)41r!ps(BxG~$C%8V zZ!Sxzs2JF!ekU>)KR*Aa;j5SW}DqD@5wqfE)K8rr{>}UE{@&R7?`N{#g2R z13sGUQ2hNU8$BI%HtkbF(B38f@@GQN;*~+BTy0s6+edr<@r@ICwjg#ZSt}WrcTUVc zTnfg6eQDG2FJiFKOf33GNIYK2Xc(!i%SSjdwsE;B7~}Uc+@ipId{64!T_+cek}B%1 zlFi{b>>2*-_IL!IpWYYN|I!}=J*M}?h@MB8Z=ol=-w?T^jn}yTgtue(#Hyvgc_F@B zFIc^pOTLOlTO+?`xTwZKy_pUF(C!Lxwm^nLdx6jyU%ptNeTV z41C%4(M&@;6g$X11hUWCV3L?6%jqu!&zj{%R!Dp}Mw;(chTd?zVCn3fBASGpJK4L+ zyB@>qN~VnB=ya?RiYXQ5C`Hlx*L-CzOiFoPxf81n3 zrAS%j?@3F%_npyuuJaP-WQEw_{Wq8jF-5W6n|zJf4>lKiN^=u%t?I=;K25}`qx)^u z)#C8SHDl>DrWm~7^*K->@G^GY>wY4;?<#&)sHdML>O_s6F#n@;UW9J4^WUSPKy>s~ zeL7$mfwgJTtt!)+D0rze^ZgOxIsLOP99&M!>t8roKPHbu(-#c?g@3Lk=A@S%&PT)$ z{&T(Kms(g*%efM^toyYr=?HWd$jjeHj=6^C5kyzc{6)HuCKGv=zF5 zel4w0#G0tfaigV#uB_Hcc;HeO{*$BTCOhqpPZe$~3*6<$pG7vg!s~H3n444d);AHa zZ{8x;HVQ+o+y;eL0Yq+I)z`iw-z;&_is7Z*TsA)43Rokzv%(Kf8SFFiL=I%Ubk_DZ zk&n|MJM^ul968#(^enY=aG|5>;|YTE{yN0P4jD^uaBFSIe7g$04oTjVZmP!SErza{M|Z_O*tf`a!;-0--c~n?H7chTsCjsJ}H0^LZ>@ zQ!ZbLB=n8DoX%N)3c{!F%|C&4Cgx`+S)JHjip?ehd&rcWkoFp^iOd!q51Bi`htTW~meKryL8DPNz5xJK|NHD}iw$ z`6#>E`{Zh35I*mS$)_aFw=NRYg*wy8NMBC<->%E$c>df69s|cd0oyZgSyv5P~sub_A$5RKaXXBB<10^XYvG`j!y6KKf 
zE?yDUlWBWyhHds!oyS$<(5OnJqMOihIB8c7ca}t;$U2`?%v2aY|H5k|Z=Hv%GB2$g z+5ONqVk<}XUkKhuBU8np0^Hd;Egwx4fyO+%ESz)s_*b-2Wi2EFt9)60*6tAeWf}JK zPl)foYs^20J0t)({|s55*d>FvsBW425_*T;d%HHxGxP9+Z27l`DMVee;*ff(p#*nK z74?b-AHxGnTH8uVDP;UEcky_<2kJ3$^?K+A<7Yv+qYcFQR%muh`5d1gw$FNU8J`a% z=J&feE*IqBWbD$!q*@M^{ww^<(nG9+KhHU{?NDLLvm}~*1L4?r`6l_#gf?tBc;uO- zS}|T(CTsSfjYHX&(;#~{2~T*<6z|_p)OF`$%gBz_V1_qsKvjkgDn&*PWZPC_=E=1x zRtshH#%MO zVG7~6*I0tVyfzJ)dWW}{iE{yNtE=~*Ww!*TheyoJlB2tqej zce2~A6KxqrU4y;DkX`W5WJG)hF;})UC^?5HM^6z=CzyvpFCHE?znh5ub=xm#FZ*Nb z3Qx~h(_TDj-06I~V*(knGd1hG+A-xvGv}5FkyGoe>639g7w0eMU+NVtK#^-DGnEGQ zc$zkv&GADq>Lm_%YO076TnGb_2^-N*nH^I-_aOy)m=x}ff0x7a-^JeTBkB~Xo+*Y~ z2V$}FJf(NKXAM%7ZZ0dy5IR~L`6-{w6cpRgI=OwL1z%a%*}nHT{OyPlm|=P) zdoQ&o8b5ZpB^?}v|HZs!%PNUS2J`C-WR2md+g)a3xGxS5Ul~2H^gbL7EOv6b3BAo- zx&y@tv$5#L->a5fl8p_8@->ce1jko?udb^@ARe^d7k4(Z17&6>YE*3E(4us$s?N|A zotOuns5?X=b8N$0om(8iMVmZ2;+KoIg#Cw_{ z^NeMVsRgrTr|q-rucP}?Q*Mb3!5NyizQ26p629TJuzea&)Z2qF zoR3Eyto~Pili)-b7>-sGJ_IfahLx?X-2`7Kk@+#Puiw(R^rC-09WB$&%xn?gEBlS4 zYD0S*&XYv;iXSb;=$J81%g-?c#{u-TsdKPKl7%nAhTtysHrU%#W}&vXJ*!`i6+ZE^ zrcHZfjs+AaWM~L~yu93xk=8$_@Fa_I!Mn*=6rcM3)b>{%CY)>jt#bQ3k<0u2kO!gT z*tny^H}D_<>D_OOP>%TH?wM5`S#CP~K|OrXuA&ql{*&!WIUPj!V7`AXua3gQDvn}S zL?2>=OziND0YW#PG#`=oUlfk6l-aZihT(r__Kc(tll=Zy zz^8sc1wYXiEZ7tJXPFz_+vG%@yU@h_C4e~;&-0BeI}!J5A$Kz+i!2fQLYD1V)nhT+ zGK0FYy$wa8Wnp7A7XSP@n9ekwfcn+1CB+WJp}c$(m49L~ipJTjhEScyiXST^{f=!M`XlVS<1jzvZSuse6r?R07_1#g!uLv26ki&=kl2|zM79R; zfO@>9jw^9K=Jb)gx;F{4@)s{2KOKvo7{aBM$Re?e)>SM>ya*rGH#!I=CE#A6!WKm@ z0kquBvu8+A4o*T#FX~zkGC*?ZuhmyThVrdX&h2=cyh)?S<@nO_vUla`Aa*a-{uo1-=*&{-zZW zirGfr?BpAh3E#ADKP6)Ohb<=cE$rl4%PmunVME}Km65fw2^9>gN|pu5B7?w2!yY0ZWsx>0|1Kimz9AE?^j!SnEBafWyqw_s zu5IpE<>SY@HG9*q5xRiG8l)_vc+6LQ!)*QG0d8w;UAD`vz(-5YxyQ7K`Ip0~`k`Lp zoUo+UGs0hm4|$VctLj#w(p2YHbpaksJI~qW(w0M&V$}L)25py&YaZwY!hs=Jo}L6Ysfp^YWty!|piXVxVCn8jP}3G33L{ zsi@Fd-u9GuZh99#iA9R$A*p*dWXCQW_XUy?;sWx~H}R_CR(>*a?VGNx>>~OucP?ot z5&S~>v>ylR?nEQwGI>xHF}Jr_+pnJRF%FYK0sEU0WEBo^7I|xwNH7(QDT{qFRGJC+_EbIueV! zch?$HH`ZW7xnsBm;b&Qt+K_uo_`()w#Hu?j2wt#Pq2qc+D#2OedLBMejkUtAkJD8z zAY*A(KX33^wD#q36`rlZGrP`pIE!VX>x4zRQCKmOs4lcD3D#oUoBd+0o#6!6&LQ+P ze>hsjJpSN)Dj8cD887J=WMLWe>{~zE3=DKK)%^A@4I2j+MB83BVxC&d&MA+3Xm`N) zomCo9_ek92qU6p%p9!j6{fAR9;YIc7up_K!S@J9LW_>k&`s6g;!w0Wn!KH)iA4f3evZ5DDr0!{86LlbL_%|kL*FL#D$4SY+2g9TWKJk z;Qja4ZH+ggG#|Tt5>pr+V=X!P^kxnY2vyuRyIz1%H4E-X^>Z-k)W^E3)Df7gE#Uf- z=reIg-hEj}^cUZNiL^^$D=LYoF8H#B<1czy${)l!XBjZ2^hhxQGu!n$k_b*v!QM&g zM|~dnbhr8kSw7-iSazA^NNqQYm_=XuQdUpsoMyhMTPY!HuFVsn{0zME_d`5gn<3V? 
zkQ~Vh{qS+iLy23?QMi3Tp8o4nAy#c(qI>5=aDnMUXGr!0H&`*}dE9?z&|fLYpX6l8 zOh!iL66$Av)+4~f+1<+}$llr4+t1g>#V5$#AKZ_*bLf&0s&aSEWCBv*uOgsJu1lr2 zuq(-5)mI8VM*pw&N)F@Fd*mza#9SMi0+DlWvw4$igW)^2w;m09fKK`nmR66SSM0 zVx`ukVO;cQ_V77PaM+1gb~4@}WtZM-iLKQKCbP0E;qV30Oxe@o1}$>1uX=sonSljp zvR6hbTm#|x7q#1;UZ{aj_qp4OqB?N9sOCeczcQ@UH2>NAx`w19=Xikrp*!rp7bf?5 z#05@?uEZ$S@_`X^|4*)#5HPvO71Hqu;5dzm6;Go!FmO2VXS(l#b$+wcs=^X*v~^Ej zoBsz=5WmuC8woi82klYz#|U1v0~yn|MfF)=0`?9s7L~BU!O-k0kpJHuAYw*NMhHQZ{S_noJ;gM(kr2goLyfz{a=&2qKlaOKl*(Ni;H zaGIDu_;!X7J`~F~bxdo5zd=IZ<)bP<<=>MZL9PkER9&uRRFJ`a;Up)EEBc@$B!2i> z@j>AF)?D)TnJJ3HwEedKD; zFJ9Ez;J^+mvagiKnK?l??9w?=w1l&K6t^4PrAU&2M{9PUREJxQkNfwEsluH1d65hB z%J9zEiEfQY1GL*33vv1qOzrAijJ_ZPAA+A*kAzBrN9?yYD{)mQT3TVVJE8(ZIt-)d ztMy=P<_~q=EeR0Fm#P@z_l09W{okj*2S}!w`B&>D3v;8A&YGY3p^o*|oCl>T6zx5< zx8$`9lJgm$Fh61w}e)Va?ne-XSMpq4}7^ggJx8E5E-0r0Jvx9Jg z!~5W~0Wrw#rZ(|r6Nkl?uj(SbPf44aj)!b^YXR$5``s;RIuI9i@M6EX8hmd&CR=I0 z8&+3eKd{joCNZQR_v|xLhu-P&uTra9B!9g>d*jCyL9b->U^Sm6Bqh#E(&(vzp6k~Y zPFY&`)im(rincJ=m(eTo2TGCNk|lo0=X*%9J5$AC&8Z8wuih-zZl#BJTJN3feUCz1 z-0@IBN-Z$7DlA!$iickj6K_Kr8c5{ioki>|ieQlamf?o#S5j#nUH_0qI#{JvDyRn= zLENuxu7yIt@GyI-*SNpZz2$u+#1-sVh&vL zf27Q$6vd$h2OvL0clMo}GPtdmbyHReL(zxO+dsRtAX)X5&Yn$qm>2ox&o}D=rEguw znm`gx_{{iQYubVK?a}U4X*1aW?$i4V^3Gr=>iU2i6--MK8~B0I+Aq?7k1b?`S^%KHT|9edvoq^Ex%U}GOmmR!t_Y!gYD+^Q7ZIaZ_tRYcjTv_m!Dx9oKi(a~~ z413Z`8!GuEpky&iN$h={!RSu6(w2d<~6 z=V=1X=PPk8$8^9Y!(7=)^%v=??KD$U@^Mg=3<%kzV*%$%c9XvzQl!U}-WK)S${-)= zSsHR$8C-6BI2E|>0PK)!#tnZX!NFiX))z;(AdScAy?B-{XbR7@`~Ogd!$R~Q$KFUn z%`2_^1(6f*d9X@qR74+!jGEqTkLp3ha}%vS6S|P}sccTiQ;JlV=6R{)niAxX_0Y-e zmVvtF$3twDQY3z-f_DaQEx=e%cC~zLhh!nfCEUJO3r=Sq;y&^~30(DC!ha7Mz!T

X9QhGCmF|Q7Dyt|N87^gX~4w!NeZJVEx7Y{IQ7RD6`(jvEBo16 z6)d$*FXcRwBMhngGM|^pf%wh9tIy0y5VMg|=E<%KS7v8ECpc@vX|fh>#k)*!#kTz6 zdrEyM6DKQ;S~LTjo7?Lhv702_<9cryQ3QVZC9H0~p@_8hpyGhSwAWNZHm;!N9X!-i zrV8>(9E^{M>j%6NH24~!0`HTF{BUX`sN)E|y(%dMRQWN-b$3{Z-!stEUKWRPN0uHk zW?k4>Y8oGXWC#Wdu~yP=Hc17l9rd=Hj*#}2L&CaF0z~OPSuBYf!C>kcPS<`@NXcRg znD94*Y!x5x^4+>HB%9G4Z>dilz&7hNsFi_LRv^0Nj~4uWp%i61t_PCU;C-ArU4~~Z}CnGpM>z;`?puePC=53wPV~1S(t@82UETy zwBL9=9cbzg6$x%mzP$nvPpz!IvMmXm(f^sB{!a+>IabD7r&Yn+kvh4UUKeD&f1RHD zWegQBcw(3SBY}`<^DobvM!@6r(LGp445l8Nd|Ob_hf5qGhlV%}VbwishFpLfZnE+2 zN${}-NBVrZ?*ygJyDTQ*Q>h``<{qQ_S*{IY)lz5n@FDygyGO$lrUM_2L}@H#6YI{l z#`y+4BS@26?t3@n0dhA<1`c@|up(*s&g{4dFkUYr_g8ZS$GE|fpL^WEM0kfo<;f<= z`>f1v(Pae?NNg<+rB#9F6sM}y^p#-u?}I0K=vCpO1!GXvX&qpC5xyA3rUzfw&uCbI z0GQU?-2!-F{XH+h!hm<11wJ300Z=w9Y>X9;pYXhWA{!Q!KvQ%eifs~r18qm zrNVwKNb|mtSqX|TW6XQ*l&dC;{OEc~u3`)s&$Hz|B>m57$D{^SO&qE;m*q%1=hM$VS_a6UrrK28lp=|JAKcS$ zFaipdfBH?I(}fv998y8W{8Z(C*I=nIxt^gMgP`1LiW)DU4vHoUOQwOAVLt@LRQ1sOwt z^uB*tbNbMv>-(aFsDr5X(nTvdo(7u&=et*I4uZ0+W`?JP0SHY+ePW~2gW5Hn#tv%- zSQOIN8q7ZmtF>M?$4r$$^5wzxu;Y6m@8B-?t?N>x4Z6mrTTk>L?}YbZg)%7;Pqvaf z?JpkSVh}4Hc_9Ot=V#pB%38w5_w?-7P6|6sgG!fAuV zG*YlE|3nLdzfj2D2X!!LSl>5WDF>oqw+>Ld8$r0l-b$uzLkMd0y8ZbN1zhox%u>?f zgOX88**kyqfd3QSCmU*Qcy%$KZt=J{XmrI0ny#G!0aHh>jiEyzO*V6W!8gB>eRUw8d-2A-`_j)rFA>v8}JXM9vky`c|e2AMO7PBh>t9nSrMPU<`@-%CA-TS^>32e3x$0S?J1o9K$MN z19xN&bogCSg79xk=S=RL(3 z4!b%-348kvm;W+}xgt=vHN^;GfTotZ^8#3v3>|)uD-S!Lj~uqEr2*FuNkR2HobX@X zT*I+5#*p)!_4<_x8K_|ByJ9~^oKJdstapsKpzP#)$a|$Pr0ZdnQ!Oc;;N|9g-AVs6 z956e&hw+&T7|^JQzYgO7nj7`nbw2b^G+z|Hui`8ios@dBqGAe#)u*lO9-jo+s_#;1 z-DYsTFw*VtxG|~fl;#NrqP{S_+$zGcaS%$)htIIGv4UzV+uL;Slb~fh?`ab5443!c z>XfEd2Y%A#ji;*;U^!b*oZ+JY7wImUaSCX|pD2!NQeK8|>^ANAlY9WxB#tY;lSH9C z%_2AXh&(jz5+68LF9v~vMXrkm+8}+2W3T5~HL%~K8}OUBK3krry=JB|G(S+RaqBVy zPAXKPU)lq-(~B!7!-L@_iTP$@xOn%*3lRoom_8lt?$~7rUTx!d8N>L&sk$-7hWZo~6F|-A z>QhjX5oF&~EDpub?<=Mn?u8*ywIoU2Q}8%cH{9j&DM%wXyS|wsMKUT^&$=n)`m8V~$w~mg~FBp0`@S!QPo5kt?cj zEB(3c^suL0~yAD8JnwPlBNr1Fjn3df^DDsbb+f$(dndA-kJ(P7I zi^jA{p;!nUgZ1^@6HdX0v44j$995uvYRu%~EWo59dH3_6RQR@@X36Pm06CZH8>H@O z1G9(V+p7~K64TD9=C|znAb)(r@lFaqoHXnwg@|c^jf2@d8==?Y7r)7*_KF*n_-^n8 z*sYON>HqDWjZ%k4yqiH%g(`54MR%k2nFbu?>CxZQsR3+M!YO}$`$3O9^J>>ceK=*J zKk}HUXB+53odxqGVZHIl=*vMaXiJVwxKc{&=T)zqKErp?zK8#8=#!M;=;*%*Czj*j zHXf|>l35bs*CtD33~Y(|h0czQxUOsG%%{tTO+ZD^Q>;gh7w%=+zBo+O>8%R<1Ai-2 zA%|mVjPap96w}Wo*EARbs}O&Cx2y!n4Co(b>U>Vp+M%!bPlOTNqO*b$7o|x1Z7L*8 zyd~gVLV=7TM>DBDa5R;knDc}PIY|AurVBX+!wLx}xggHCb)0)D99G1$TfeaS!J}iJ zn2mpUgIjlHQcyfmr>WN-k6#_0{ppwjjJbU_kron|3i1O z7}Op7sWG&o2pR`|Jsvu80J8Ff*|ZKw0o~hWYc!T76_^`K(?320mEpOw{(Ds6p#rIi z@{BAb8|+alm_G$ZgR^xhs|VoNjqK5=tT>qH+4**~Sui0@-XMA`KNN~2Z>*)?yszf80_*3;EXclh9{2q8rlsV zV9y7&ZK})05OyejKio8jAIM z%bwY2f$%g7OSrKi%x`~p6OB`ahO4DbQ3Fy?u))oD>8AjUO1B?up*sc+5(TE5_w3=W zKU2==m6NceRCQVFk_?>S)?o2BwIoTsT9-~vR)x+wcs3s+MYX^Wdu-z$Ih9+& zzk`#yX?KMoBHEnc>0lozobGRDN7fX{rBJr$?b)xS_l=3F6Prq)Fv{NCtWZY9jv$8^OKs-siQBim>Ljm2lj|9LVMh?tCY=08PVr zo`=NyyNfrPA?u+soa=Ds+z!!!nkgAOzQa1uzL(WnotFe+XJ7ZmSl=a`8NI;wZIak; zKi|<)SrP`koTwdGqX);lN7Cz?)nJh4m8G4c65M`sMLAUQ7D?e5*|U8E0tEDD;-$-w zI-C;8E7X&+gopgi`QB-!z<5+Xhf>}G94UD>Yt_ubtmZ4lc}fy6Ji2BPznjQE`=w2# z6C(jaUCG;N7UFO-GJBE#7STVVTreo~Rfap?>psuAA#lA2RvCFO1k^^0)wG_*&=4N{ zTSieCGMgjqC!>_W$f74=Uo1bYHhS^WYQH6^P0d`q{NGv7{+&YUank{^YeEC&f-k@& zpV8QsqbAV%@4k8MgaoL0t#11N^arz&ASyovbNH{C<8b36qTjOSe@u{73O*IRSf^__ z2+yX?)r_hoVBhl%2}?dD2y;lh<8#CuPQN*OPw$%)aELoheH@Vkio0xTjUH#=*TS&3 zy{8gPe0cv==pH*Tw$EJa=Q{|U)l>E=uhn6pbHFq>GSS@XQ#J-xQUHEg(=BT=3(pM37;-GW3qvXL7CO zPrk^}gQci*l4EOf@XB6j{Qe(9NV&1!-s<09((dQeXFe|5Ky7+Q27RzHgiJc5ot0CE 
ztibr8%}9C}JruI<{uOZ$G3u0y>raA5BHWsMs08Q!Qoqf}`9q>IRwo(nQiLy`Sw(xD zq+!zNV9g0CGsxBnJ2sy~XoA;w%Jj5XNerVFER8Q7kUr%rhdtAv1=sY*s54wzkUDnc zy+}FN|N93#`!58-3`574nrd4pdDrNeYU>LWhjsG}j(LJBSKq=51$CfdewEd}N&*3X z(fjiyY@nHD{VFhN7u)Pg3yhj0AR0B>@{N<`_=27Z6zqZu){~Pvw~2ms)cUM%gDzCmKH3p(K%zfUZ9f?0 z3Wr04sMxv0!Pln#MDsZf_~>W(cxlrR8qa!uh~^;nd5NW({9YsQ(cF@|dPooS#NyXP zgZoLx8*YpCzR`w(+G8!9PgJ4J=UAl!k1;$^X`8QSmH?;zKOfKA>O-M_j2H8qH;9s4 z==BeGLH%ge&n2l`cp%#SFe=6pQd(}R1eT~lZ~W2&;lJ|mhTM7}S=YvI_K5N$YW@|Vla$r5xQCg=&W3BlHXC7LY1f^E9Jb|l>lmn{OALB!$$bvp2R>ESj)eC-!|Yb$>7+J+9q#hsF#@CdNnTv z6@Dc@A~*S9x;2CM%4a?J8*t^(=8Qa1-;8gm{4xZ#LY9xU=cGs-V+pS)rN>EbZI*Km z@|r+l7{dS3$Q1;s1*ILOJm40M`+iet5^#ha+4JS1AXL9x%HI$_3HDQyA7{_m!3NWP zD-C}VJP%O5`Iy)rf_yt(%4?j4^hg7)Z+ER=g2QXbsIiu`U1*V2Qg{F^Gf^e-d=Ufw zJgbws0_MOT{bOk1?I~z|-0arobOerU8`uW4>Hsn_?C&&D0s-x5v&dRaNX(ZyaWBFa ziaxKd+t^xyie_#`$2S?!n2elNHsJ-s8`W<|sV!khO{@C>(!+LYZhemILAa!tuORHc zAI>VD?w$-(gr}>A!=zq$fWTv=UqMw4u)J79+0KJdUc}2gzitnX|6C+IwPoRYc^@Bb zq7>Y;eL5wh03euhMzM`b3*HZ(h$`3DfM0e$uO=Qn4SP!dC8w~N!NVVE-+gJM;p6JV zq%tKfV(%VLjmPeq4ub)Mo*uUsX^k*lEtRRNz>Z>Sf9U!lMT*C8K1DgJt zy_UTEiFD=Ykzn?Z8qioQva5#Z<2u~ZqCV{G4s?o<$JtaBVT6<|yi&yrESgl(T}4Jf zV>c9&v|jk^gMEmh$9_w~ZTbScu3}I(koGs1&#{O_f&i-a>c5$1O{B-Q=d1Fm5Y%onaEG0(-`PwrHC=%SA zb1`ofO(o!D%%p`~r#$#P&9ffbvqiebONMtZ>%d0~&MxO{CXm>$$c=# zL-&6aorgbG{~N|BWt8lhy?6F@E_?62Geby5C?m2fl#~)0+J%awP)0=gW@JXCLPN4D zEBwyy5AeJ^=bY#M-1mLGuj_>F1-po{d`fZ;;ZqVw+qm}+p;{7hHRH83zSmM1_Bq*L zhWer1CV3wCn5ffUex!iw7I#*2;r|>|*;qbzblY%&>8!#L-z>l>Uo%-MJ z{Gc?6!-}Vk;4q@*W-6_U#`({OGE?;7Tp0Qc3y1L0apmCmAV;hQdQCnxGJp#QThE_% zBMj&4EDYbS1&h4dluMVXp|G2|QhVB!cqMyyAx$+Ej%yHYGr5Vmwi zi|m8+?qp9~5qClwlX9|rXdSVAg6F{zs!77NJ>rYPe@4h|{9XFH-kZqz;F*17st86? z;r(4?n98QY1nq5UThLbh>VNW#+$X9fjMKcIC0?(E(W{9)BhstihhL3yA@Vu;1GA5q z!oRmKmdn-_eOqTHH?D}|V_-9Uv&88ZB0W223r>~#JM2z7)ME*Cb00r!{WeYc@%Bl5!}bR9S)rRKk3=r@Mn8Y z_}Sz380L)oDE(I%8$NB9SF>FS-YL}r(Kvdl%`U`LZKAj9yeTMX z-S1p%HpWRa&gJAhVa#6aTzFk>gg={`nYE-2BYt6UV8CLDSYhuunpf#YIET(>u}&M{ zc7$-}i;#K3cbnd|X(KPB9d=NOFH0d*bUlYe7VYtM*{{%Y!G&Oc^CZwViWi!LhR3c| zxDp+&mZw$@m_lZ8X6|oHDi(J>bh>V7h0`OB4c9V^QC+#ec8>hs@h+Wbh7bt4GvwWi zHO0rPMbCpTYvFK54Q(X(+}3;_TFm;UPR_-*8gN4kv&LWhA5s4xUcR|vuIlZAjOtH9 zGqUEG+4*2q&ty9`oqkgY=y?$4#6Z>GcuRc0@}$t|iVLAj{3K322!bp_Laf}f0-hG| z?O#+C#c0R2SLRG!xL&M4Q(^?iW-}$30dTDT=** zxL`77nW)TxH!Zt$5@Ze$@duXPq)|R28Z{^ry%~+jdHs4tc&H6*Qp^ep*({;7w1qYK ztP8=CH7n>Wumen8so9JL5?~FVph-Hv9Txej6P?B6?`j8MLzyCZ9<`;uPD(Hb^UaHv z_fyMoaJYA?W113_d9Hl^+v`HK%ZSZ*pLQYSD^eUfNxkHRk+Ni4dcDpVrx}W6r{k4aXSyRwClHV|LZsSlObx`-lzT}vvU29tm< zyZ;IAMUi@;kpFWp;;_l_Akh#Xk}G)etn#%les9#I3^jQZOERLFUvpfE{=CQ8W>0+a zhDY-8wFBkE)7+nB3#q_1cYS50gmlDNXbouGBIk1VoyU~=bntj@q#lid6jEaE?wuj~ zK_T&-DGXl{AR7_Ac(2eNed|0!r=$(h;V!VPDOU{&X356JKgj=Q<+!`1dNr}#_iuw! zwmF{mJ@t(*kb~U}|4u>QSme?q^-Sle;COEb%YgL^;i-0w-OHFA;?`1$tr14(Jj?qy zlgAl1``2e2BXp4#UiiRWBw|g=ZX!{}Bs|DJ70ME=1?|BaQ~&?9ju? 
zr+MP<7h;E0i)MzlA?5?9-yRDx#CR&Vj$uVF@r}FUjN@T97}(70(0yWwx6L%PnLBi20)p*Z8giF3f9uJ=--W5L(bi5;l(fPSs^#w9G!6f7~yvGca zRibR?SE-P_MjfQ{Kp0>Cp165ITLq$jZD?#9!f=GWj=zsx7yUNuV-dot2;eqp{Fw_ub@E33hanR%5hHi9Rb`O&wt7?yL>3wKlVF-v-LKQik1OpyAJgl zKDUORN@3D%ZABcKxjrUdW&)J_`#j)cjy9of7t4y>h_m7kKL_e_<4zC#`>)qcK?YH# zXOEge&d|Uo|A#S*owgliYc>FP7<=uMgc9~qvwyaGK2DfF7%y=h4o2}{MJ{U}sSC-S zq>(C`AV&6{(i5(7Awutq82pHl$IiPogB5BM#ARdpFHxf8K7{UqXED16l%*)h1aT(3 zx>u^a)pjRZy?-5IHWkC+D%vEFI;QV`PtvnrvA{yZC&KHtBbZ(&(3KuvAtZI&A})Sc z!-=WFYoY?~M3lRfGQ+?xf=O`8MBsUINbWS1JN;>$;8s!-_7oudUbf4T(RVHI{PfY# zadvO~q~Wp-b+*B!7K@YpJyzJ+5^>ke)d#bkj~uI3G_iYOf@;{z12NT>T*5NcU{16& zYLyN^JfouiID;h8DU3DjM>w!__FrcHk7Xinuyl52RuD3ScCXirsL}HKgv-8rq>eeQ zbH|)h3>@lKnfEGeF;(b%Q}whcHnqZ@wO&vH^~fEef2BV7BWrc9p}3AP*od7TOz=SZ z`JQ5p0+Oq%^>uKuP{rsTflQfNL0q*lI4s7*0z&3v4gDoU2yuRVU^%7(4x6~A^dt6Q zvU^Pt#~6aXa^A4Fl>YEoOsqJ+Y=CJ|w=4=pKk&6w-dQH+GkPo9@K<{!F|J)VFXv)| z0`&x=zeBbtQjRJ&;BdgX(S!f=NZp7p!S^NfHG$~Ilhn$vUbXreciO|D$e1}Z5kQFduB_02e_^ObwTIgc~nFSVPlIO3( zMe0iz*>@m7^6UWJHW3^u+v0oa=uW&%Zgil_@qz8%mj})}yMY7T`E)cFiENp8K2J|& z#2MM?4v_m9YpHtCwVG(rx36z3pxbe#~LLwBbjpLF(j=2!~X9fl9 z#oUO@C$qQv_sSm! zqT*FqN0`=ULhi;V+vZgh41C|u5k=jpnVV9M6@T4uyJz@2|)W-~c)q-lL7v}iZubI}?;em^t z+(rID_IO}RWyR=338`*dU5f9*xNhn+Z5hLXGJc-&Z%uI!aGTnqsH%&Nj>;d``ikMT ztf8C|&VwrLH6_mX#<1IZf1*QVFOD2}NT;u951P!^XS=5ikdjPy*zSlnlp1%a=XB_y zHl?aKZN?EZDp}I~LQbgvXt(GTZw=Gkr)bxErE#C;?SK75SS zH(ooMxK)ar!DS<{8n1Q_7*Cy?5D^r{-yvD+>^@TOQIS3-oi7RoZk~RE*Jn%SRb*y=1AU=upXkrWJ;6e7!ztrk9+*K@y3j4yPRamA42qF({(V$I+Lw1KgQHp{n`h458QtC$}mIcx5e+sKcP5hzia2&Kt0G(P`Ye&w}&x} z|Hew9Fx>MWocu6K&apeR=t9!Wp(?b(5~`$)j^DTAq)yl&SlK?*qnSJp)!aM@A{3#8 z1JZ0~pAm0w9enmC$sAtQj8bX2w#aHJq;g$+1@F{Sv#2pjGd9(FS&URNU zDl#7uQP;x54%f7^FEz-$^1*x#HebTLt?x0_Z7W3gr`!Cq2bQZ(M8<3H!!-rt*!TiU zOr?>p61(q8XmDI=b|dGqh-(WGsBHu0KinZ zLF)Xm-iAjSM4({LxjJKz2EGWb1@S&*xSV~*bhOtPmB0UmNpEGuMEIIQHJ>z2$0R=O zQPV@yp8XtpA1LAO$V}fg?~6_4nxLr>2Yir9D%^0UfUHaN4T6Ti3+Im(vm2xi|5iBS zY&I2C$@kqgR13j^v%$;gemt@ZyVTV)Hi`C(26HR<-S{4pw&%;NHiX~tK44MtgT^Ok zYK{gmnEX)Tt!r~3T!$~%+*&t=RoDTEL~a4F9sWoWev=t1C+^w(yTpzXt>-H_9XZ6) z@f_EZQw+EqtMK}|g&x|2DVpL7L;R<-IEGscDR-`oD#`GKpcAZW`yd$QlC+e2Myf{QkN zJ)PupR@wV|GDJrLK7lR7<7i#j3^^_)A9W*wDUz#eABn?s+po#m{gPNr6@koy5*{< zk^S$;yAf+HV@$=XUaEeYNj>uJWuziQZF>;6ow&k4dte@^o5C;cP-4NH24+GyCQY2Fx+#7pJ- zS?BFc(Rc9FULlhEH6Lnt`P$((apv!vp6X6Ev_+|FSCezb?WeD}P9Ah2?n!S==5qxE3h@qj{Qt3T4d*qT2Mm&ECo z=Ft}-uEcDE3Z>e10_R(wt1!uJfzFF4yJu&^abh~A>cewmZ26be7s+jf6X)9=$y$ey z{P>h3@0T`$YMZ)MJjp}v%0D%7Ez6bAk)`=G${j~s6P(%8;v@Aib9cq9o(SzjlupRE$m?uS%Nf_*xk6#2Z-! 
zv-)k1i5y?^nf<971j8{8)hiB3*s{NB_oEyQ1jEp53l%Rq{N?}qvdxV+Q2OtVoZv-- zzfyH#mOOwxshqqDyS;Fp)wX&#Kovm`3WQQJ#PRY_v3r}+9C7ybc~-*!O-QGlU)Lge zx$5kovs{S^ppd_?NSDovjM&Qe0xBZ7^L@%h_}K=bWV%PGr`Q@4Yz9M|^AdQ}P^ABQ z{0M63m?Wi59YCX8_SH$80^d0kt&4Pc!7C^Dc2vq2yE3E&)0@b??7xh5ZjNvSCfafL zTol0gutBL9-8!NAh-WyWnACeB4i;ENI-%d-49DA2CwO(AmHs-qL|oCB8{(nz2mR?u zK{XX4B%K_+9Wtnn%-Ukk5jY;xYEnu-)GgrW3Zn^^VlqzU9^nP?Zn!|-r zLE-rse(*nE;}ah?#69nPox}}NXJ-v)JgP~7y6F1RpuP@eJXHUF%V1+09TC+iMDK#j6I~;yX7kdLE)L@ZPlhXRe4o`NPYd1!U zqfatc*{E|nS~K3Lt#QlY#wmr0Ur(*U9G2a^v*8#1{3i3W$LkYBUhr(B!8-3Ll3S(iqVMzc9^L0joC2WgOQqvO2%m}gakc^ z9xY)8)$EVUJ=MH0JQ!K)Y^{#3&!aZ?skq~kNY~qVa01(a5Y)uTprH?5jh>&z^v^`T))-yJ+n^%gOxKEuiPL| z`0H1V`TNVnrk$|4$!&dH{<+_(gL#@LRpLz6YT1WE$I#jVNlC=9r*GVD7QhRaTQPZ_ zCkQ*C)*fZDe}CoG6;lyniLi;V%cYGo#8nrOPrt$=LD9OsX8VFFyp2D~-M;yqpnTT$ zrn!p=RfBUi`>OOX$j$$GK-vP~!5iN?XIu$8*|lB|vVJ~}p3nU6n>}*Q>~`pEv_taC zt3O-PmGQ>h$v`dD2T5!*ihns83=7@nVSAs8;yJ=`5C z2NpuVCg-{>>Az`sJ6*LO|55KdD@_xPohetVV;(x9B~fX!yaHHRm1k?Wa>p;nU3a;} zoIn*J9_9B;9=%SfqL8BFrrr{`p)>ZlV?zf9KTMA7 zB=?@TuLn2@+&)G;-SV;Z4#|5^x+dtXKDvU}G)G0AzRd$|ku=kbMRzE!DahwbF`)TI z?Ay)S0MI&3ewi!Shlzayv>r(lgbnlM$H5LNxX~(;TD{1Kf{N3|MG%18xJ+)uBMCH3 zo{BcS8I0wR-m;&lsK6D$yzN(|0`5GqxvZ7lLu|OcVP}bB!-?9K_Sjra(2mV@)GbTH z@N9ds%2hM?O|?*3eV-s42<0s|NPbi~E5L-NHxeQ5X?FWH_7YaS6W=rh?T~r6)jMHQ z15sw}3e=xO@HZlv#Y&^fF0 z_fr>QsiDN-=yq3vB}wehWjSM9^VYi?e@+*1W}#(!XnmmP?povI#spiFugjSPKWr((xqptJJZ!St(yr|cBcZlB+t`p&&Q*s}SNfnPrhBnsppQ6n z?dCDYPd1?bRhnTy&YS+_mT6AsmGF%&bXj7W^*H$zqw17_tG13M1GAiwFcg$FMR6t9U$8HM|RB) z>cH~U6)O8CDdbpXkJ3j-p`&H!(sztsMH*Oxj87JmSw05C=M3x^{mQ~MQ zPqD>*{|!m12|wJDq+Pp0`m=i2IlCFH>2O}JuWo2M6Xo;Kue7K1@W-t1Z=hQgG00{u zGrns-*xuUPe~n@T{V~ZCAtZ0;wqc@@=Mju?E1R?2g6g;t^17!|&lziFO?6vCsPJv+ zhM`U-H>ldKYMppVauR2fpR$wtvD`=FL(KH-SoGL5eo1op%g4&*W1hqimaYe?Fk^s< zgg(*K3oZmZYrIwTZ%Ir(CL`?C#L$!eb3T^T2d;8G*W^gF#?hk4z$|tPFpJfGx}Rx{ z{0F{2zS=QhgL>O{6+wHXgs1olDJ~I%r@wxF@>LZ%X`^5BoP{tf>%9GeuLri-zvPX{ zHA6Sm`ZW)s$HW(j7Ke;TFTzW+eziST8=_?&iWwvO2;0k_6RYn#!AL|^sPd36VY#fY zTsgBz%wOP?WZP$om!T#u=PoZ3YAkPcr1+YMHpcZM0o-fE3Tvt0n+07gsWQuWIsYJx zlwwbm6tW=q>hZg~I zwwo32<|`Z}NKT>Y{GomCm?SWAMvO(uO%&0w2b5*3{}4u6PfpX6>Eiu9Pf44@bHsnH zk^1}CT(I?>=IG|MJ#=R!J*~ABK=b^L=bHoFM4#PeZqa@Nh{;Wy33$teeC$4J2 zsn^%g%vup+EAOY%zma>{<;R)Z&YB>ALSe(=5Xm#{=NB!PTqi95cAX7WB{2H>fkxWl zAmVhRgg{hMCVB%;ukLat`PIW*W$syJc&6QNVqKq(%s-M$?W7Knai>u9Y^*CvG?Aqdg?h=TcYMpX6lgAGc(QsyZXE4r(@0_yM!-JzuaXhs0*jGKE5%h)wX|lCU zA!p`^4KF&!?e)6Ei2CD+ed-#pxqso%rBXHI4h&uFe`JBs=K>%2>;-W7dF(UE;hp%h z8t)bGpodsL=3TF+8;%dW**zZ~MWg>Xeca|LedKHw$KSZ^i6gzMyt|6s(GtlLEB85=NY$60G;m|FKB&Iz-%uYl@aL+C9a*ZFD8AJ*Cc4Xy>~jpnOS}!XNMk9&vU=T z(^!wVOV>ZFa>5rvR@bf@*OH&J`%K*&j|d*H|NF9ND-Gc&+YelPhPLmjCwD7CA4N`4FueL03Wyyj7x-eszvWy(ytNk#2&a=El@F*@`H8v6Fe}nJz*3N=9y}NDNtS*V)-z zqo9AIU1kr3FI1U&LyPx{!62<~o<4^a!j0yP8M_4WcRYZ1$ z4e4Wh{pmmV=c~kjt!b75D(A`I_Jg)onWW$2HDe4D4FyKts|OwLV*p*e?4!ggj$benbLEjheoTnwj*F&Hp!@VxVnz#*A}14Q#7UmRaJ0otJ)0Qm-u%dLP6qam z2pV|s~HiJ~)-v8qK(sxlTbXO&QKW&N0#^k>-JDK6B?^1PeixBkh-H19e_Kz5`yda~g zCjmDX*5l{pq;Y)w;^HyiKg1-n9($>RI4ln|?rl>M#nt>hqvCyrpu6&<_|Iof1g5Bd zDY`F*`owLcKHek`a$x@}v&0Vs{axyo#7ffFJYn*1qudsIJ4dhk82Dhf;*(>S-mqYx zGdjn#h98Y|ir@SX+2ey;G{5>;PMqGhC5l>358hV}bjiuvf;YMSv3iRUbRO-aWVDsS zi3v_O*9I3fJKpOWQkO&@i}bIQR69iN&9ol=&j4Y4|MYH@tr15rS~1OYtKhDqNsyYS z1e`)!e$mrNqoe*fRn((n2yN9`x)~}AiA!fz&(AWFyh}&mMaFSr+&Z;4*xM90_8I?r zBx8rJPYq%pKh_dUvPTy`zu-j3L;G<5eks`K#Bl6C`h(zc9T^-a{rir!IzA7{=e*KG zP2vXme0nk3<~n9e;NXS+7BhPb{4tO`-^5LanfuftGxAo@kz{%E%qAT(ek}v#{{EyN zO?0c2hB72(Gz1saw?OT^i1Pn7KDCw8m(oCT%);*+msw z!kA)Pm{L@*!`@|_Qv4taZv3@L4h85t-5gkPuHxu zC=v6FRBf>}_L!@runaXz!)~n|M>dzmktul7c`m~Mj?R|_pPXTbRO$MNjrcNQEp+L1 
zq=Fc%4&MJ(E9`{WZy)1gza*hssfMzyKo;we%F(Kn$3H>;R}W8hBYtSX2azg4=ARS2EO zCpi!H$)U^o>V{&KK5=Mgh07^5GZfpUFlLyOytGMY#)h^V`YmU6PMnm7``Mr;F|A}i zhQpn))W%&fywR@~lF9=ezt$9%<7D1OJrS#4K#^S_j zi+ajU8Cd(p3$6-^LHe~?Va9#ZPw?ix?5YC+>fbVcHU9|2J@1`ZCH2~nx~)Q$76!P$ z7coG0T@WvtmOs4v8G!D=yX(Ul+8FSCBRNR=m-(ie?0w5@@t+-iISa2Z%4>h#TK?kz zMyWHbdDa9v<=)g${v9LMgVof`p9{i4_-B~)*Kl+wzU>TOu|(pcb;NU)?9OJHt4hz`0JFwfN zk(_1Lm$9nn>ago{$}%8x6+SLbj)FP>>7C2NDi=5aV&?bgREJ z@5Y{IGZ9~;G{BN&Iy1lB6mxG%3qP(IA>GC=tXpA@SWiFsK5kYG_Kd$zj|6Fx_4VSv zUtbOJcbE6Ajuzl;b*majvk*)y=jH-uXz)s-!EH_S0Fw9VF1-7?9oZTY^-~$MgpXvy z=F52j(l@v9YVx-uTIGMH*pPlr6WNF7M?MQ-u=`KiKO-_fMbGjf)r1L>lgiUWB$-Ix z2;Ia+jUkvGu2}Tj$)HUxQIUkm{V3~j)n)}CmW@Y z7U^rX*BD|Kq_x8CPOktrCPw7Qf4?N@E(58|GnV!AGEkk99elc55pFVX-_3FH;+S^} z>jfP}=p`1G?J0Lh)*E_uUoAaYrzQserQ|@V$g@ALMgs6x-uq}YLjWfqZs*P2u!L>A zOK}Csg}kT=wQJKg!$OLQ1kDI1_`W;5uyt2MSxd2^NwF#%ioO-yCHpzqN4Kh%_sQb) z`*YEg^IDLc&SHt@phx(ETEoc;{s<_%`ryZo9%8BQc|zb_UA*2gC7!aZ1;)sGEOahL z5aX!){M<(yEn}=DdbK);{#^fqGeQg(B^0`D)UFaI7#^Dv7o~9NUHcUU1v3oaFu&PQ z?~FWunopX7N_bW4koMA;1@9Kd{c~9)@O)g4&Pr7q>w7Nfk3BU)YA5&0yl-v@cD9nQ z^_(Ksx+gs6N1WmQJ*l^W^n-GTe!8)bRu1y~+r?kW_7h^=e$ALWd2Cb%RikRt zCfu?pTe4ie@>&jU4epn+J}80p*R|kua-Tksc3hy%Ng3&82E83x-uTd6DtXmX4F|7| zF1nrM#PEn1p9I%^A|oLra}T`^Ce+5o4TKJ%hUpBipb#q_H~EacNGAC=p)oBwO+6yr z=8lvozYO+;PB3inOQ8Sa(=Sh7h+`$G{D^hcb_^jze^;zB4me~@eDSe{A(j&&zK}j9 zpP757U1MO$`XJ$1zBxX5Yn99}%HYkjd4c^*veL0;_7_1|_VjHad4T;zhiVmkdC*+-vQp)dB@ zwHo24e5Kx}8e-WZuU$d;6Fp5m@-+W2=y%2dEy26yc#&je<3-oILXTg@^3}qKmB)tG0g0!vKs%f zqVkboKl1H!##aOQ9DQ_)mRXOW7~C;P-Rgz+J5F%?f4(V}&3Z{RJHv5PLS^?u(tnsH z&fZhRkDRfYN8@bb(5rj5w~RrLP|?=fRm|~+*a*B+rpbL6&un$Dan1_pQ;mn&61yK#6V^y!CE zUCg=vN$xdgLHFFlk+d}?yl!G$+UiT@>pXM%wY6Ii@%}m%5Ap<{x>4(-B&&$M#^px- z|Mnn@|M&Sx-#S7iAjyVlO$zouDEg(-vk=!}Z9Y=wfPlSTKDreC=r{cP?B#hajP%TH z2^bf|^(`N7eJHYqe37mLio<54{3U*QAyR86<^I8}%_zBdVT(Z>qC@(0EtUs&`0 z;h&Y}Ku~B4W0tNV-k+PB=4i8nNA>Ub`rgKf=AKOPT$LgBOl_2`s@%@wY znBEqUpR}gcH8PjuvVqB{ zJ#KlN_1kclLQ7GoV8@ssDAeYToBY)!%Gfg!Z{$V*(>$|M(Z0yD|G{5fz>VfT_fM`I zaR*PoEnW0OO)RD}@NzwM1w&xCQRq({T)Y)FI7jo8u)N>B^pX0y0?m11)hwZ#nWNyZf$Q!SIllzvq z^Nq}$W?mR*?&-+Xw#5_GC*otGnTSk!weRU)VMP8J_+yd#nGm=t#>u=jlFSR!ZrmLw zg55$v{*tn3p!{~o#H~028Ojw?UGF9F-#v!c5xXg&z4G|-+oReD6)vVsxIpIk$><%J zROur^#_5Di+7BXGDd4k3y*{~TS@JSfh(}2EQ+kHiTzFfSQgd7~1eM7Wcl}GGu#o9; ziav+T;S(16pq^ukb@T0mH3IDT@RnbD(FNx?lp?mr%MoyzQ$JfSch9 zmfWxeJP$ne_g$kBx}p|iBUUY;qvb7}e1!*ZFC#%_$JT?m^mr{(%!?PN=MDu1TJM7_{~(PlnRB>fYnL>&jx_w3p2}Rf-%RY{PwlEC z>-HyE*(GtMY&;p%|FgS59fQel=ZRF z7v_30*MZ+?I<(LqjjdOVx1P!djg^1D=C~Ldq8HBPPcfqSos}Q+PF~0d|7&|RsEgtg zhMzm+wJ?^H5LNG{k6zoc&-xj$4MoYq?EYF!E#A{NqEwT3x=w zszVaxw^q6u@?;>9?Pg#7O9Ar_#>^XYc=74*7whORbl5x0%d2T)k1GqQ;zGIXxGE%< zd8xyyM zes9wv&*KwnzD4JFZ6UUDV8@Hw!Vu-5KNt2>2#KG3*Qo7uVYO6n{5s)-_J&uTacjCn zcbcJZQnnG~AH>XG(@??%udWVr#aMWq{QJ{PNDI@o2CE&H7~ryX`Qv1}F46vkT5W-c zcfpmWJ);5*A1wi2_f76{&kOQkI@9B6Z*-?`W@@5(B8;?bYu{A^97zC=xH4?ou8o6({qYG7W1b!uDd7>egTPQE$AGkx^*)X$wU^)y)i(5afPL zVALn?m*GL%FUPyxU_bEwr^yJJI~8uo=h&8u`AUk3^$Z2@%Y|GlHSonrbB}%FR5>`J zvQ=C)%nN-ka*UOQkBf&Ci$mf0IDR@m&W0*Aw1%OF36D$w4`}VvF;8dIUW>eRux1 zHm2!r-)(+mPtwbR&#f5^z~^+XTSZqLdPYIbFAlL{XHrkzdIOnjHPv}&lbjp2ecU1& z*eM9xQq`-8{DJszVsj*-Dw6aOxuu*Y`;hrGkGc6!F>Elj`g~aDCw;n`+qt}z@PMAU z8Ji{zR&F+pet(kxj%12%l2Jl$M8VNMQZF&NBEy@pN%9Mi)+Rc8<#AO;cFJ#cE9km1 zWVO>YFi%}a?5>T&n>BmOOV5vh$?RtD)uUUmtGOgsQ`-uZS_{EK9qw@GtIO;8O6L8X zJGp+#)gM&@@2)VAxku;vmOS>7b^Y^(X|KeBAqr0=_}sQ-C3$^@bOR1Kd^LYDYS_dH z`-2@yb-XgTSbLIl|HVCsw>x)kbyAT$H+CEv^^}FV{)Y2I^7(KdE{f}!=7Y@WnS;79 zE~JhidgnKB988QpgEzz6;TCa$bFG3E_7|K|T@o0hZ{4_BJgwwXVc#7Uv!A~odqNOa7k z$`f$Lb7kvv2Pq#s{4q;^(~=7!8B3NG8(wfeFnXo%pfKb$gD6cX8DY*e^|`lBm*Drg 
zfBVc&U4r-S6{aWyJ)-&m%e@-1ely$&GYRM=xp`v0+skS-7}0xNzIqB6EU61|pI3t{ zpI22stv#;)+`n*O${M0eN5n5X0gq29nHh=fKZD)94rQ}XLSDV*t3_!mM&`hKgqdYxJI@j-Si=smSF zV&0xjT=THR5zcr~(bjOZDhitamL~mk^84?Onzj>}?G*GLWbO^mXVwaJHF7Fs;c1$P zGKH!9r>{R=aAA^vYgEi7NgT~04h}&q56>-WR zlQ?Jva3geYW!)VLl&E!6B{=fp(A`(n%1=dM9C&zp2bVv*Ls~`j7C#V7yi*&UBT8`h z##%=o+3(q9q==OX!TD1!-#R&GbBOKoC25Q;x49qygwz;IfXO4zMH zbcsKHc_NiS`{kLym*4ep_@!@W`$-jyH_;nM|sXS-3S!FOJ3q1Vj&x)Kr?}<2W z1q6>D43!~s?v~1qEZp!_#f-Z{JYA|FI#%YtJU&j|c&C=Zo6me8*uBimmc)S657~MB zj6!%DSWh{`OX{C;Yx%49=@9f{OPTFOQT#d?RPon|%=<`AC`+hU2FLXK&wKbxaWK%P zwSZIRGW z8=&$agrUz%{ac*mx^G(MG8dXBW7h1%h_)2aLS-Y9)vZe$`VfD?mQMxO7kNVt4=dr8 zn{K}v;|{d!8*lNC=7-7&gYQSLSU@y&`+#wp6u!JFSsx~I2y1gS_ty8CA~s^edSH72 zF`%~6m3N92;#7}2_olnyP-&`@Lysn!Rnm{yTp;{05M_DMtR%;)Hf5YIY|DXn@+XU{fRz~SpTfR0GuATP_KbIT~asFn>*tj+})iYY9J$%<#}HKK2B^-HAP4J@l)&48QDGClpAM$!gw?6!=QXYvtmUm() zdUXl=zbCEM6ccfHqU%IYxHz6R<_dT0UnD4H3Qh=8`$Mo{hwcGAdC;X@t(lB)AoH4f zjJ(6NG0eQv_}n&+Z~wGSV&Oq zrPtq~Z#IUw5IuS6Pm2?FC;2sA9pgjEPvKwp?u$TI@7BPPQ+vQn+o@|ua__HRaSgRS z+=9N8x`px^yyV}ft5h{A!t1E$`}IL`&$!%qetU8^QQ5!c&zI|zc;%9)5U?zWFN3|c zhsd0or0aL5E!X2a5r*OaxMy*+J*cdZiYRFmiGYd$`hkfUDmW z*h&8C;9BO(9fq{H=22cWZQu{;F`@Z3K2dChy*l~#nL6?HpD>kZ$6j0)G~wE{CIcsr z@b6q??ryuNxXFwNE$VraI=UqLh&=(MwLEw_!y0440nX$>_S^TSBNjU z_hCPFA<`4RKjW>-d-mePrCn6c)V?@-ceT!&Vr!iw07B+&xtKPQQDP!KZs@St=GS1h$6f$zFaR`2A3PjHy)C{EXMW8 zbtzVH6l(u!;(e}81a${JP-I>tTyB=^q>lE&hW?A8;e$KK{cE6!_+Cu;o z$De8~N;5($;gz%k>7VP)z95k<*^}vTn z5hz>z=d;6}5to{*7L)I<6A~N}9^u7lc=2ZE@2s01V9ad0c!0eA|CZcWu8L_8jCrxN z7X|i!?!P_#AA0rSZ~nnoUs4$D47T^3he#d%F7=qKIhlL0YV9NV-54DuO1hJ<#ozch zCG=su82A5oE}Hy2=-8SWv+5Oy9Vi8z0Zq z-?Q4qg1u>N`y(4zarW3j7HxJve9C5tzr&!5>yQ5%t>4;A9LeU}4jpM-U~lB zeZf#uxW97U2e>vDOh&YpG;xbA z_?KxK51J(s?-yCKler)3tKmnwhzh40f=4>Ip^;LY8j<;kP>mOUVNO3!G)c4_U;3ed zjDp9ZLRUp_Iqko@)o&GWrkhFLoZK^Ucg^*lof3q|%+dQ4J2^1oMNfHxLK3_3$|6~> z2_b=x+4jt4Ao#vWrY!$5CpmV*fV!CfRdn9*SiOH3w|Dm5o9y}6=VL$i-h1!86$z!Z z5x%0VQpk)*5|WG(QnF=~kWo^K{O;dhz4Cf^m2=L0?)&;&*ZVpNgL(Bb8&x!bb9CB| z=zWXL+uhwDCE*9xDH+~~Do+T-$5auJ`NEa#2g>6-UQp;6kgg&n3itieU);Oq0Ee8c z@3$?P!Tg8l4a!(+u72K&O?+9c+{a$(u4Uh=`Gg%O(6zsi69W6Wymyn{+(0=&WW;&G9A-KbnA-Jx z5Qk^&h2t{PgQ^LE(1QpxFIKXiFCgSH%~vX3${Z6Yr+paasj%4Z9!CNSOl7JGulkQNgb0lT>LKFrZ=pq>3{KC#B z(lX;6nPFIagt@(z7e2cM_I}9J0;Bd(FZLsPFrP>WxyvpI&wRslif#zQjazJ$iFoEqG_thURF~fMPb-zQF z11wE!O0yJrLXfVB+Zqo&e41qRsiUHYfY_McZS;H<#Y6@@p!|a^{ONny`B4~g-z#D# zxYMDIJz_}7m>j}Y|GT-9e1LWAN7Ih+vcMXbUdTa!7Py;G6}IM)!KLr2nHHHBfK>do zrNDG85V=*1slO6}NhUGjvFC^%&U*W!$M+@7nl8$lmJsE3Q{+ti9%({{&xVKUCpRG0 zd)r8nzyOw94_v)HzsTV_G_^6Wr)*<2&8bu(|EaNx2$_Uf{B^o)5F#^@@ zZbmzQFqqCh{9sG$1!ZD-`%;yaa4a)@ho}YZZ>BXyAW9EJ>M)tIc03%NKQ(EC69l0H z3wta7VXRm2t*VSX4>TyxiDsU*gSxEkUp%XcDPBz z?grEk=5b;T?htsohh6(8%FPcG)oP-3+jm2@RA~>LU-KSS2H)_3s4Hs0Z${kV*N^uL zU(*HQM70z-*BCEcc1ye0s89jLeh?>BL=z=YYPtdtDC|O=0fi z$&=h_Qo!o>jl;5E5I(hWS~<)iy+!gaL7bT>T;9nXr;11CKR2caAIeogVZN7i8|kqF zlUvDJL5$GA%yV^a@c@&&QS{~xoj6c#kLy$|ntGSTp z2JkC=gKgloFr2HYdb6cM2f^3Y1Wr&hKo`zBvrkDLW?B{8j{Z2qq@7~K+-Vt*R;@{C z$(4mH%ep^KFA2cx#lyzkKvIx@{eGbM*$kGen$YwRak087iX@A55#RhztHbq;imY2V(`ZEXDDh%86lCYQ_l`} zRNfTS)X2bc$e(w{N76_~_R~skA_B3DM|^6;BA`^VIm!R=1gIR0C9!;_hB4}g?8%rR z-0J%D@k}lc5ZAm6J8qx?ISNB2B6<(-O<>y{n~;QejMmRf(RtBTkb$A`Pzu~D%ttzf zm0?F)!Pvd?JEo@B%xK6E2pOmMN!G&Y!P0t&I-pk!+@CB4z6@~#g%W9aZ0reb5|&f; zGprm!J1IUm-QRo$lv#9)X!f9WSSx> z9wMDV*r5|U3h&b=AfsPljS}wG7m5IX1%>2POCCb2oxGPff2EAui^)a%TFkQle{{33!haCi~eYmFDM+rItpJa7z z3W21NdFH!gHDDaJdn6Mf0X>Wg$8lqCv91LnRqbw7ECN&iaS}a0Ndq>2D}@r_z63XC zR|ewDDt3{a_&bXYy$=btJ81!aX@kEx(dU+LX5S$!wuQ6<7EKmUU05N>+T_;f0+~kQ 
zuLS5ES7##jwzFUlb6zt{JQ$;gYwW@=zBRc(<8&0L%BhZ`V@gaF^4#R(aVq*?pV$WhR*2yn zrsnUke^T8ZLM9fVt$1NP}z97+1;is;fHvE0EXj>Hgz zch*(cOxdABRZVN_C<$JS$ z?7sa)cQ<`17-<{Q5?;53x#zB&frz(Y{5Zw-QH3tZ9fl2FWn_Y``J#XGBl3{R6r@}b z&H*}tOo|H^1i`9kq9fA*H5-y*q5=e!k}FDhK&~gon+@Uqk4`L?ss|j^59$76hj_f!4BI zEL6eRnu{UEf z9!Aa?{%F3c2fIV@LyvzPVg65Fv|SLTgU1;*IJ2?=?8nJK!feC?J78{h5&CQcMW-C{ z9j>DMTjf)FAt5Ei0ps~@X(R^b<-3=<(7(5P@p;~^xCWSLuXCe4I4pOZv`_yHAlj!m znB8p$S6EH93b%X+d4MfsN%Y%10~E^K zz8B?#`U7@YUc6;DfQ73Md*xAXttm&eGaTu^8!|6F+NXV?$@sjRlb$TnmBhAjP-$-D;~B#Z!}$S~ojL|UN=cs)hIBH6i6H+Gq{nTay23Gq z^0SH@zN%-a`QVW;o$Q^LO5pZ)(D+N4Bl0^RzgpN=1~90OwcT|B4I=eR0e@NH{-=It z&QBgtRC^=Rnacy_waJ3?{-c4M{q>DP``1{2fRd2<5e|eV!wLdwRpE3FhsW@q1u)IJ z7R#&^&XPio=g0yiaq{!c<|#D;)9j%S7;ltz+RN%n>#$m z*x?A7-BcYH^q0F3go#N)+D7}OsYWrxPp>=1ac}}Dm;dj_@^e_H(zEFzSA8(OL%Qj{ zV+Kbx*8LLPIK+p(B--M`2+Jnq`0_K$7%d0yEk@j5ES}j*-uxmvY@L$&QXx}^o$25W zGnW;HJKs%2qEnsVr{Vra%Oe*UW^2^&M*D#q4;~FHc0a`KOjJ;gAij8Wr+3{K8V*P! zZk}YNXUX52`DuOz zAS*3jAWB;sejIBa+*?38VOVONi_WLc#2%moY{whYi48-Vq|5KC}%9hq|556D8whVPo+`TFWe+5}wc1$?nuYvAc z-aa7^W@9|7v$BOnw)(h{(6K?=pdf17ae_d#R|0LxV&ErqHn3ET7KZOoEQ_zlK?pyS zu+alvh)w+vmz8A+w2egXuE*#DXVUYXdX%eFk`@+vtH}%&cdAq`TH=6uv~N3F9QCPX zx_@+9;Q;2A0p1K%Rj64kFW*%qhLWo(YfE{7&?r32^$-1jUuS%sD>?rPW1UE1JNplD ziPukVt6boMuE?#?Hd!@za{C$&Z3XhnG44yq+loPq!OMdB3TZHUV+Z`x)Sw+%xppSY z5MI5YE3>IKgdc7EW3*`BdaS;05)&W+;`Bs3BJIe3cCO3swh1x3g}dQV=>7k-J@w}W zl}&7vq?*wUPIDbX?B6!>ACQ zTs2MS2h^cGaQP{1xEYi!vYFVFa>K=v(e^%GU(nJrsIXIJ1@GP$LR@?NVAAvY10TwT z-sT;yPcF~~71fsl_ZUQAisbBio;oKSHAMS!42gm1-iOLA=PgW!0jgL~uJ+p*E4N$6 z{6Idzzq0#E3qGFlw)trK8`F;KcD~N72fv0^)Cugl;r9*ihevKmSJ*v{vyo7Q$YwwD zlHasY&U@sjbR-L>dS?yXhZ*41laV`dgwg;V=iJS%C;<0y?*q#V05Qis3qQI9fMtGu zGyM1@eB9}p%Qp`KF^$-BU(k87r}%7jwXP66dFH*%5`%bm>`Yx>kq_*`ZpYr~Y;TC~ zQzsugQpKi?rGI>z<^ZyAJnmg18IaTa*o9Bvp-}y@OaYM&_}bwKO_gMTW_naUg zX5N^mJQRcO1Ofs3MJo9F%l=X?>eor2mB~KtRmJ2}FP-%?+`}d#<%fxL6+!u1xj7iJb4 z=(+|TJ$}A}@uT;X^1m@lOh zVq!4LhxxBr2E7#lkM>mH{NM)d{6!*^QA#j;yQJvz2|D0^lYajAoiF5}a@zm)iNK3A znjzR*8b;PD$(w~mLE+5yL<^lfbbjig;e9Rw*KUvQI~S^hJ6nb2dUF;ie=a7Umfpwy z)7`t(V#fv+n=z{5-Pw@)L|hhMMF$Lr!fya85@;IXa zSfBkR)8mdfk&pEsm@k|JPn%io?0^{XdP3?wUCRKhB`g`bG+A&*isqo~TQuS_eu?}4 z^?E3^qZH7q4(E<7>lygb`Q+NvKLVtm>uj86f5@N)Kg43QMo|Bq-u>lg#x7|6xxFlD zM~vosuhhKz?s38F7b~e25()@<;$LSH_yHB0zS9~{5u3A+-7 z?>!jpAUb#?@pdyK^!WNHf5q=$MO~j=)Q7b}ZdPenNL3P6B>U{iGW0Yb)*pau^S_SBo#1t3b6iY)ynKU`2LO1D(4t^T(36?9oG(Jdz zCHA}Pa)mq0Gt`~U8${fWyJ<<2@uonSIQicj0_5YazI-k|Ul&}2hp#-XFo1#SMAs7& z$C!(Y!jVQwFUp-C_{Q^F0g7DjvnvF{38jKZ4R1L>`d-`eX^wx`_+|WaF|_Z#+5GSF zPj-FarC_N^Wn9edm)|86FJ!T7^}|equd6*`>SI^l-+@Mo=J+0pdd) z?WoRUkUKzdvrQ@v@I4s;djpAZlkCmwi|9Tby2$^|{-Ys8(i*$JZA5(d^m{T*I||Uc zK1h{~8Np^;`3u7+A9%b@dH-v#D-@C>8fb6uLGnEfi{fwGz$e-M+XtB8fuhr)@I@i; zal~imAwKnc!_y;n(Y`>rXj;V_CWv~T?i96jGC`_bzsp>cF1S`!RDg~Uw0*pO>OHMF zP~|7}t+}E6xR##Z8DS!j&Bo1~@*KrV+BY{&7aw7@=P%%)g%jx|@!FS3sDb~|Fc}j! 
z57Ml`gUvM_X)3l#mn(+5wPLeTp2)ECHaRLzvf%A0VA+2enZd*+jaxbGt{ z8b-ahK~+CWm;PX5%Hb!{z5Zc?J3`d4_YnUb737TTY-2xo#xL0_h5#W|)wt;$37}Oq z>o~Ed1w$$~Dpwzp!Qq^q9&ihQnuq%4jF)g)mWhu z@?^2e)}t=dHmDVTyiJC6UJ%w?KyC z=r0ZsyG@|pWt0Gxgxx~>e~F+zQ1XyOgc5vgWOdX7)Zy!LozOhW(>!u+4NiQM0lJ*M z8q0npuz2vP{xzdJoSnR}^3)gLaptQvu61E3SGmcW-Nc6U6lxFpX=`X+Bzd5%xhfG7vgUubgVz-;9#|bUoB>P zOmc3p@`mXmA6l=!QV)bSQrm#!H%4EYT5rf=Q8S$NL;JO@%{MXr zThmEdX8=EH^{XQhzeJqnjmot#Ghn?S+Dsvtj#Z_cGnkkpfgkZ2M#ah!5UO$DT2qn> z+6NPC6uXTW{iVd+b$&yjZFY1g-#r1zssggE;r=k%#$PkTY6;Gvqv5X4sX)fS{0)sY zKjP~5yB<)f!WFuwR@Z0=pq1d)>a9s-_|BVw9iVv>txSTw`VJd}`7EbvlMsNo!RJHL zH_SjqUMWZYE((N~R|^Zem?45ic*NxoFdrmFIWCP*+U__3G|yh`WESKC!+{41 zZ-Eq?Vhkree<;ImUgfTWc|82=Q`q0|poiWg+Qf5oK*ot3 zo%kE1(8~2R9Tae|9Cx}?>WU0(?8w6PDC=3d_g^qgxjDf!O)09`^&>ZanS4P=IWoY}o#CaiJA1+QxNVfh(HW|&J zgGx!{zpP67QW%ANVx7yOS-)m5rWD;|CA6=rJ^i^k8|hokL>r@zBy`}-J;8U590-TY z*C)=Qst>aLz4c}OuCP0Kca<}Q7id`qE`3lnf+2lLrbTHpn3R&G7(TLxJl%~`23*?E zP;Et6xvme=Hs7qi=2)SAJe8XDugXAzQxmv?dR~_wQ#hyTqg>@a%P?W1CKNm-<)mHK zg^p%}*N=EnuJkV16FMox?N^q(aH$FDOB4E6!Z#ElwIVb5r92LMu1|io^s+tjvR~x= zZw>7Wesu6yj%Y*Bgj7T)n!mJ;lN~$ABA+fv@cPCL8I)%-)urtegny!g-(SA*gF*9q zTg}xvFzQD$MJ|lP{4!RUn-sL6L|IB;5B2zo>GW(ZCWykts$j@nPx7Ulwo$f2Zh5p(*rgUOg(UPzJ}AXEjaoR!}6Q`1PZ? zIot~#%;n*;f!_rlW%tBzSZL>WTnXAAHGGLN$wk*!L$}-KwUi?a$(%@GNYjD)f&_E6 z2N*;&tgfH>qYunMLU-}JdO%KT!B#eH0B;GqDMY<|fbV`9F;BTMSbAt4DNq{0p=Z)7 zTP+;sdn%w>iZ~dsjbxq5r{XXgC1aG#YXI{!VS^%0Do~SZX+@8?<=0wW_ojjb;jP!| zl}^NQVee2NIH{rossu{PLyz=eQ_noVdKnKZc4q>j43S^)S(il88DiMEr_^^F>Fam` zNeK@L9N=4>o;BFvA@QfeJ5oPpF#GF&|JHkRFuMG>-7en%>Bw)}C*4usjV)DhMhfZ` zXw!0Dl(vV~u#*g@&RZj20|W+9DL~f(?x}>AKD3YNjNLq1!I%}RXB7~qj{CZE^Z=P7 za5xUvG;sp1!0BM_ zT~-iWlyynZF@!y(lquY-D&QyFPY91hz&oWqrOWTm!bjh$hvVfD7)8Gk8P%{J&|0WI zv7Yq+T)>oy&zutwF&=Au;KgC*xfH!lT-e5<&&Nlp=ip)1+G)l<$_*N3AJ~q01%bj~ z74->y97J&V(g@^5Ld#*so#=zW5md%d0h8L9&SU3&! 
zq3={nAfFEodlK(vVorhl%)fG~(n-icv+>f0wF$&aBbaKsCFBpQ6MrjDY@)v5-E6u0 ze>9*e)5|3l<(B1-hMZOa{TtRd(yP zhyOmGIZ=x^%@1s@%-&(3h8M$41@$I4EWO6hp=#6*^m%SEXn*pCaw_K~8gw41{2*GY zoL~ux7FHdZN%p{+^w99dgeH_$eSSmJrUXihYaAM~ey|eX{XO8K22kFXi3!U>`z&pC z!P`&3Ezhv9`KmFZ-cr){Ud8!Ij#XZE#Xc^&wW^|E8HyO~b zXI$O+hUS?3B{Pret)b-MrEhj|USOC^wUp(9a@$z8jbaGOOI+;yk!WZR4i8pTuP<1@ zr%T`5K43P$8#K*la3Bi_bYYaasQ*cIXqIp`5f6EU+RjB)eo*Q)XFKIn+gkr(5)(yAPGlEBwRc9pQ!@~u)D;6QM7};w!V0-hboXgVf2h~ zzysDSqN%t)OM<+k*zN8jD{v(>-CFtypfzrIRpX5^&7bZUUeep}PJrG1jWn*aHAyn;TRMKP&ub&(R=8)O}$zlMrNjl=2d&aUlFWkaH zx1WeD+(7$`u?Q(&89n&+wn&L#jRib}9pYDX@t~1Ckxwybf_O5$yu>ILZ@BhgPh>#{ zw78_Wn2Au|0FH0Jsazh?u&4Z4=Y-&Q?Agb&#HJ9b_u%DS10yheNPPZDIS;(&t5joq zZwY#>_Qz-AJR!%qF~h@93YJLZ+DMRY@jRFIAM1Gy$k=}oU#hGK*kqm~Q6b6~PnXy8 zm#RS+FMiEGKnc!dw0Kz~4l~DlZZ3VyADa5Y({#}L$)!MTl~+I+%rCcb7^C^%u$6vb zt)LX>YRHkEy{reC6?I3o#DU&}BgHvGaryjmWQbPe4z;w%ryk>{R`1|Ap?K#gn4RWxCLs<#1S$ zLk^xKF5_D0z28v4`%&Zu9!SYLF}@8un0l<}pdD%g8$thR_jw6G`||Y;)$>lk=A!wt zV?=mh9>Ij|VEFLN|?1>Vu}hW3mPtEeK9a zbvC$;`mpEoMr-clLH1K;7E>V(WIgE2kFF>aL;ZEK*SPxd?w}YZE3UsCj`)j*B%*wJ@Im|g z!^LT7u$Z&ROJ-6A(vxRRI;1hMqhz09lBI-{!hd1<)K)O|nqsqnh653D+1X=07cdu4sOx;^3{-lY%C9b%0p};H_aodWr^_2vd>`d} z7!w9h=zRsa_Fo)%`lKODCN{FSvYEg>k;lj}&I}|dyzjjnaRkddrJdu}_8?f)A&}7H z3OORI-xZRLA@0hzs9n@cKankZ-zC=&ZqB!GxGe{4N{|-h; zapEwYoYV*U|1^PuM<`i}a0`=GJ;}~7zz)qJ)w|v;Wmx)B7>5T74kMtB=7@`61``iP z!lGp4E2ryp&pT4V?AGz`2OFPZXOc1hwR=8IcGy%Tp-SU-(bV|evPagMRP zAX5;R`UriWn8=FfMP@xH`&Bf~@Y4dFb8l%X&iXaQVf9RWC8V>+C#4t*OVY{kl{7Xtd*&oaxoaAO(Hi#5(v<6$ zvOY9*?)OX17y@;SSluzTCUBj5J5^q#2sfQ?sG7xdLds54R^pL0JTFX$di9MH`Sdxf zeQy|m$0ba2NM8iyNq#!qbo7FG(_Mp-NE`UR@aKL>stQ(GdsWnF+!ek}@WQaXFAV+H zNy@$Ei*ng>4t?tZfMbl*KOA>}LR#(op7=NjUKn2@WA%p2*ACkC(KbMNZS5I@q9Qo| zU}3t6`kKFT>`n{+wgyAb%a0Ew9l-p8L0rB!J7n2zE(9R{Prm$9Q>rynC`t_?l6~w7 zlv=?ANyVz5I2kEYLhB5s9)`ADdPqlzBAeGoJq)KhGYV=yyMydU3FQZVLNHC`EMUim zI1)i!&V*HHoiBRt^ee^-yo5^`9-*8-(peEcszF`Ay_~tfO=SZt0}Iog=aEm;yDr;( zT@Z{aHY!50?BTOVi5%&iGL&V%lpU4DVfFlTwnUNW94efU@y^BzjyS%By1zm^6m}as z>Pw$6k)UJt=`_UoWtKWmW}yL{1)JuoQtIJWM9tg?R_P;?rnAxYfT`N;Cs26#U5b}t3Fn7Q3Yo#5S zyK5B2o|K1!P_eVKXOIrMaeXLX!x3`r_3+_yTCmnUk^Wi-<;I8=SG!y+pesmEfMZPs zd#q-D+D#FME$sH(7Ohc)UleJaSN8U?l-W=(4l1->hdphKowkFwmAaL5s4(*5y#r&8 z2sdc{)Qx3N;;`CC>Plj=pO{c3cg|Tf&o6#IkVnvn1Hu=^Z9^J3>@sU?+iL}RSg_e@}Yy>k!oMoW}j7ZABL(PXB#K>G}dIl-Df_yr~6)i%;9$HpYO; zdAy1_%GLPNy0!+X>wy6WV=R@f5#rs|G>p%ffkAu9RGgO|?6ychdq_tEhqe7vb_Kpb z>@K+OKw<{F7c=cr?X*B-VD(wkXGPEtm}RNEtq6&`Tb5V)kpA)D!Z)H9YFJ)pNZcB? 
z76^FcfZZQ?sDF)J@Rl|L51)4#FY#KSEtlJPpo4VMG!*HCZH2u+N7k5fyv@+YCm%&l;6&pON&-eq z_s}^ZHb|sJ!x}zZjlZ?^)fB8FZ0-!PxWH#yy|7XfEpY9+$d=8i2Q_2YEffqC;4IIv zz$a2I;K`i&w-{sw2lB};eFNMPXYd({mZ*cp(f8^>J3Z8k@U+o|FbqV;LMN{LGX;Vr zf`XKrR#5!mOqRd258|`0YwC$w1JlxhO;@fxf=_A(Ai10+Ris#i(=$f$U>YeR`A~@D8efA(h9&m)`SuuWq#O zYh@pnR6hfuuHtX5kmp0t%}8Md%6KSHbg|e@I>cCrKMKJtao}EJqJIPRtW3b%}x2 zK3R9TB>-hyL(|?J6!g~5T>n^u_~WA_ahLscpm9!XU9MMSk1C@kP|l(&G?9{?s_`io?B)==po>!$aT8$#H&iALY)gG`#*dS{Y0 z5OX+_lpTd4UXHf&Qi?Y8EeAV9o>9Y?FN!pGxdy;a!voReLPMx?`n5u*j>9zY!YX$u zBjM)e{&v=jXs8!z>iUo$3*YVuwdzf}z{l^WG=-kq!il5v2X@nl6Z~06!j_Q;K7C6b zFfP{u75BT%g9%x{P=B(k=d=MBZ9TtHfO5$XI^G))=@}wU?o-DT;zqFN7(J0PZV%ip zWM{AFs=;#;f^SsF_w=xIeip~nkKLWf>SkoeVbvtrls|gRp`EmQ|>)fZIJedcE~cR>r~dwTM2p`JWV za>JBsU-2+(@Ai!{5BZe_bFX_NJ?)YP#RQ9i4iLOuu79GR0PGu1`XfVTP&c6Q%=&{4 z5Gmh%7QU_l0k^OCuAB@!I89CF%|5FuhT8#sa>v>6{vO z&<1y9oz`Hd~!CCq<*Rtff2;~Jl_szP1T zXivAPKD;KiT@(Er3&stPoZdZ)fyzp=>lROKfGut>Jm|JP$WFdpDbII+#-}YV5jmnz z^~*Y;gVYS#%+n;u$rGrX0kjq>4zBNI<`fpOWdX2yn_FTM)YK4Q(`ipUyDh zu={w;RO4`0@R<4_SW{pMD@=P)d6mf5x?iw(3H9$WQQOpe+%N|Xf@~#UdUS3*6?r!; zUKbWqZ_bda>46D5DCc`1UE!*2+zmY=IMtUwE1Mw$nxuC>EXG{`@054B<;rM&p+~D& zj5w|q!wvK?QMy2P|NiCuH4Mf=zbPDdI>Bj$DW|ue4B+m)^tqwqH7tucgh4UH8nTot z+q0%MV07y6zqc#)AT|`<0*~0BOrzDW@0SMDe>##-$p@HBaLV!cVGg8`^NU-A#xTi8 z*8=Bsp+Mu-XFsHOyLRob6Qx+f6W@GkKk@Ha5b23ak(77{`BWPu%lZ-4Rf`)OO&*(f<-lsJq;GcVJgb35|?>!*t8I9ld*vp zXeN&tTv5P7^M-0OQ)3E{elnT(@y!x=>dp<5Xz9U2Id|tD9Vllq@$G%mi*#6KC2tIAH zD^FjCt{l?xfBZ6#K`<-U8D|T@e0#O?D1SLm)^uf^Mjf_!?e7+-q5e@qVW)e_x**hb z;pEma4y!or8ke(W1eIEH(dypD!1KVdywXGkUWcSAo#sTp{}1#FJr;<+yqlo-5ABa7 z>GcdhB2JQP0e_ikfIMh(sLx+LZ3Cp_>-7=YnZDFv z1h^EM;D2Q}?88Z-$d+P4$g8M$+dENti>K^+$?>x<&1zg{dE7u z|Ma0v?batrqbw-j{5RD0-V3f?-*|16st4+Iv*)JjaoES3LOGwze4u!QFzBYYH{^*p zTJpuNU=_)9ugm9cpz`_~e4?QNBu1<%^f%eUV$H*bV7w`MUa03aK4U;|c$ZiBgcq#F zd>EnQNBz+~=Fz>MRUmoMe)!yL708({u99Bjf`~L$t@>O?xSXZ(nXAnP9(MJVmJ=I5 zXHOM-#SaOnRQehj6l@1~|7rVkj#z<+DCGPm(g%t2OJoGKy6_?FuBhN&HEdru$i!by z52pR37YtK9!0PUGEgcP0cyqeID^1!8`2DDY*l(%9;SA}vZ@UOE$jUfT^(TD6xGVJ?Cq&N3Vu`B%nq1_#8zp4ibGWhd=b1K#7j&xcM_Ya9aLpw@R=B%OC^p zvvJm7CP#Sx_lP|VOQu)Si5SD2>xD=8tcGw_v5Fc{&$*PHTmG{Rv_9ZJb}_CygG=h| zKfFTvKz8!UnFi5bO#05szsvt(q2aHA!$q`&pMQB@hhlvM2G46iplUe;K@kM^fN zvTRKl>Q{f)^aP~W+~HNHQ5wxvKadn3_pDOUh5cNjmZM*Oa96rs&Y}bH2ClT;$Q1X0 zLp%l1rExsa>DEvUVWz+uFtUMT@q>hX%UJnMb%?p8pfl4e0&%M$;w^IiPP&z5#g#xzk*x(t;+g4D(tTVT*tNo?&!{op^^Rv%ECK=@aA zVR26_I4sWWGB1*YtCA8yPD21q`qWWPbOtbeN2LGEsSp@SyjSZnq5+ni9K8lhHqdR} z>cb+4=2h|$zZ&8Y4@gL8rG8!uHqB}i)NbRjNXDE_mTc79{K$-d)hY_4XE@s%mo?!* z@%+ozXq_8T-dssXJpNF3=`BX&Yk1U2_JBD+68Qy7zgaP8gF><3%M&K2fbqS@_e%aC zU|1Mju^RY?>D|^%*i5zn%}L){;W`Ew46LG_sYX38UQ0m&XfB$RYRR}0=LF2mVR6Za zYN$`TV!VCB2q=bY#;8INk4`@>+Q7$&jDUc^Co(?5&okQ4HzGbfDp|%iGR9vfJU%Qg z*gGc1JDDoh*E`HRhAJ8#q@hP)7Z&h}*D66f+9)g9*e2S6)6FD*0M#XEsf1cTZQP ze`ssj3PZbkO0{C%EZ;wKwKZSL+sxU2(*I{}tMyk3GiU$Z`9DyL!?I~dHI16)&}_O~ zHvQmH|J`{S%v~p&Vf5(HTl|fnXjD@zo3Ul18FN$+vYG1H%(-lqooB$8jzV|AimlnK zo&V{aexn&TVgd-+Z1rsRTsFtf)8k7e-#Rwmkqt=5rk>24^=z(OHuugm0V$P{&(3nS zmd%r>k36Ay>)Cv{Y)C?2Y$26Kp|{Xo%LXN4CQoGHdbUU| zTXg4Xv8cYhb!W{9c=ErPfsid$&lb;RODKtI4P}@unTVVdOVzWbbJ;RG^LtmdBO9Fl zzmR8QNIhFNmo2A4sT8bQ#JVh>7?m}bvO+x@n#)$yQi|PeD7KXnaiC{(<$AVCE?ZSg zsdZJd)zbfu@m#E4&(_FgYwkQFl1ru0m1V;cbfsryt$Mb0E*q{Qs}`)cVA(ne`P-2Z z^=#c-ww`ikt=anN-`D!TH>hVD=CX~Hjo!Rv8>j!@>)9At&o;?ro2oqXU0EgDECBP}{Eq8YpT5H*siMc!>P1#oUY;-Q$TG<3s?di(4NywFsY+KKAxoo>s z9IcgX`}BYFeC$xqcFbivrCMg?ww~5(=Y&l1RoSJU?V8JWONCLhVs}TjdqQ^eRQ9N6 zd*-seR1s^%u0pkzN73({s3V=q*Rz&f)~b!yR%k7@Wo?Og!gE`wXYIMHL#5Kwjf&ct zfGa$SVm%v^%f_ly+KSamDc_%!67#Mn(pAs8b6Hu5)XL>jR!P9|o=8^DESFWa>B`lt 
zmj1nwuWwI1>&<0-T1I!F)>pQ%*|@|k z(pAQ~lr1|TF$;Jq6YJT5x$K}+vVBD!g$_~Zz%_*`~^RwwHz^c1oa6LFa@ z=A?RdaxObXyG_mV-PJtmhp& zyD695oT_OR2j*K6aEq_Pt@Z4-Ty}d}LRY0$%)Bno>~0YqR_>`}_atH` zC+@9h_vNzt6HzXAl(GjBv7HkS*0YCl*~21gu2lPbGht!rBSg$4J-3PLJsgWUaV&?<+7KR zMpwSOn7xvKpFF3p*0a}g+3U(_rBJSvvNsw_@g&}?XK&@Qw^hGdaJH+It(Lu$py8gC zck9`Ex$J!_?j|)a-)oFPSJCTo|`l?)0yvX zD;3P<34S+BG1$DAsM~yqb2dL>ZO3AFFU~lq-V0z(TEIz|k}wbxbz2Z|&K5!}0q4rr za*tJwxlY1`iEpxnB`hLgQ3`A!Ry$E0tvTqKJpYS{UR?ANq}e{I6|Bpa47ld-Qi7Kj zybNI`jXlbWHaO^ae47pty{zcvNb?`9MO!}5@tDD2D+nDbbj3j1Y^6X;iEOf!g{~rW zRieyrwWnCK)dJ3YhF2H7hTt^`^B-f%EjBFJJ21syYhj{pYa`CtaKuWuGjDCS4!)&A z-V;-TM_{6E>mttCdWcmLrD7Mdu|B>g`W;M(-T)JIQIolMmYHMd9--i0M8NMgH1yjN|$3)$>K%BEth|_5l6|lQdvMn(q@jVG|vaQ6A z7QZ!l6(4q7+a};Yd<$+XI476~Z6#jmX(<(}owj}Or#ruc_#MUbG@$jY6x)m!6)7Wo zIJ=8jp7CMF+GpC^tcK}?awiqqgNeHJBF@S%ix+{A&TqXWqw7W}!sg(itV zT>KH_nRV+yId<8Rfq&-7A0_-~;l~hX@|C>hQ95>P@LM>4ocQC#pFp0=9b4$P6N7yK zQw+vKbKOoxoU>C9D=U44Qfs-(PQ~|BEYEq)PZNH+@G~5*wpy`Lvoiz#-g9!6@Uw-V zL!5QJt!QYAmQn8KhViCygZ!K)<9rzx(9i)Nr^Av>4thP$&V`~c5`8gg79`rj#d5b@ z5;%*C!7deknefYrbED>ake4e0UR5aID+OOA_-ex1sJNWso^(y{M|fVY6@Q)h>&d5_ z_v74oL(qFVeWU1`MBhxBIqyP!@2=S`fzRs&d8_c-gx~IXTTi8g{q&B&zxTY~Df}+s zcRP+vS}B(7p1{v@{9eqe+kJ?0c0XcO7%Q|Jt4XQ>cY97A6#J0ahsiSI?Zs{$MIQ-# zB}_5cqnN1MV~BJ1IAWDzOHZ-HQ@|6Lk&-VYyvd#v|CIQr$uoK692af&OyFlZ{;crl zgg;N5lYJ_j2ke}EPMo!{3q1)X`y%Lhd}Dqo`YX|2lV;+T{#qwW`=%4ZW%6`_o9tK7zlr|c z=`QvJ+8;sx$Mf;0=)Xk&?Q|90as_JwlhR(;OK=*@LUSB3n&XJIDJ@^D+Oz>*jVT73 z4ijjPBhJ|jh*e)%|IiAy88IUzen!HZY$owDi=Tx&k8njaXpILsn>CF6kWU7iO~&jp zc-^P$pj)%s1_b;SrUcK4iMsK6DQ9ydR(7g|4jm`B8B=zC#gup+Fwi1Lj21a!l{#AF z<*~K^z9#x)&&oj2ypTq7oHUCpZyh!$*dxAYwW~H2;orESEB7;|4Ia`i~ zDsHK#1J!H!z{{9ouoW;-x1or0wj$zm6hZ@yZN-%^BPG9f!qF}lzl!)($>Y;htk`P! zn^2yi7;JUSs@ocfbG9a8C5>iCDUYlS3w{pg*AlaHVx-_r1hW%cWys?p z&TpFqKAUHJbKz`cpmFXvyW?<_+cNN1JoT-Fj~2c)ajs_t9e}n?z&)5^ux&9>w;bY} zZHHLv*;?wU;^NJ=$BeX|M<%?g%?r@V$sL?R;Op-}Vl8m{LaCdBH7$TM4rs@Hjq+UJN>lFST_qzB-8!r#V}@QIkH+kuF4b`WCW zXcO0P(RwhxC!7^e_#v36TOD!E4n?ejEVs7eg6lARO*Hpz(UUMyx5E+V>oJ2UV-96w9=*}~5u&f3^oE|qxZ zIyd;Eoj*_f`Qk4iuY<0Y+tI3=95@S%!7ju^-7Z3$vx^a{y6}FUU4n0^RBlQvn&yHp z6MQ*g*7O!N0ImpnBd4zveU<2|Nylzyc1_^tIDW10>x5rVoV#>Oxm>dw0^ZHx8wKAa z_-4Y)Nv&K%U*9c3pXi(CR?)YKzMV9aM*m&G?g;o{Px?;5cL~1R;ZhkJ^PYg&-ea(P z1>Yz5e!}=P<@5Fc{-(kcNAy9=s@p?|bM`P|)sePBuRRj%s}0y575kXj$H}HkvMMTf z3l97|YNpiRbK^395t+@fgms9#pHmCTx#LrEh1&K26 zws`_RA5#oAFDB49M~ucfVr|gYt~Of$U(%W$mCz<*fug5e>_TK!&GW|ofkAk4O=loDs(2HNI9w>!Oo=nX}0L^|ymt?1sijRT+FH_=Gpn+V_3 z@s=Lk$3@q9;Cp-On+xAU_$cD6i0!3vpKTfNYQCOZ2_7wYYr?AKWo~~o&x60g6W>;R zPW*P{S&A(f<1*H^4|*4;cM!d!=$%OG;8G~H8!LR;Ks=5x*e-&170dw)X$LCSO168z zM`23v9+*JW95K4i5vvH%rtHjDjIG*~_Ku!5@0`@FML2uMm3B9Fe`^bPIfr?Vzi#b< zI~>+3b_UG;7G?bx8t#uS4c zhl#o!k2q&1Al4@0SOi>_orv!#@%u3){3J}E(;P86%@Hec9Gdg!Gso9Nv%w_#G|{Jv zK7%y>(b8^b2D)t`o9ry1%vR3MAgPkYkd?^=D(HW>8w?QTc z%=v;Z!~~k;h|wfREVz)b7VQ#zOZomWv1pPDzD)4tgjoZyjs^C#*%cuy?pe7~!c`Kk zroh7ru7uGT?zU@!-_H4K#a}1>dh*;xtz~wb-VpTdo}C*--z55G(##IZCY`fx34yIh z2D??lZ4z#$z~rmA35vFP&@*6)!S2LF-R?r1v%3)|8Uv@$V~($>Y}iT@eXr>IM6+5n z>hAc0JrMBRm=gRTCeS=bjOICF!7XLn2YCeFQp&p~79Hk-9~1nz!@Lf*Cjw@pRw+Lz z_$k3p6K089LE|Xwnc#V4ApTkL&xwDYymq-73h{-2x5kv<7co({mk{UdWyIPkaKizk zC+rn`PZ?+5xA0emzb5>3;w*0T$>1{Nji7nOA^J_xZ;5`JG|z@SxZs}VJAw1!SNOZa z-xL16<7ifLr}!Z786E#n_(#G&CeHP2$BiNTB;W--=6Fh{J*#5^iiL0!+&hB2ob)5(}##tbyL_*M+ooiW&boSjMR%wlID z%T=uudvQfRYtVc9)|^fB?4su&JuB!qD)ys%286^*Oa_}%(p-|}rZgMoR(j9`EnrL? 
z2B@^zJYli3gTdyNHJ_~cY4IS#YKfC|(H02dL>C51SWv=36!?#p78?}k#!MK&7RD^J z(GjDKj#y~EV9l6Lr9XQj(SI&bxE};Wp`QRI?hz7d&q2gC`9z#ZPm0q=#f`7`>Us?Pr;#Vcl+;Y~>Wy(VcEc@;;QycNqfECgNxGuT=Z)|N1w0#_G}3=D)uCwlP1eNP!7 zeqHhFk!M0ZMO!~mo)#Hw1ECuV-H51$A+T4mwFfoXHV$R9=VqjoO{8o}g$FWLPPFbe z3!I%I47R!OErgG9oLvSuMQ#~5@7)XEO898uTNCHHq8Wob7iQZ8&qj#&ZN=xrZ$}=V zCJwlVS>6-h0ki71BjTLxgt&3QX)BB~dWoLs^e&=z6}_9&=>NyTZ1Q6= z5WT19y-4#P#jXz9JJ@|a<-FJyv8`lTU;2t|Y^Vp#(-ngiM7N9XAkBZ|an02k?0h1@ z7R8PcJC-ay$OVpLB}}Ci?DuSRVHVoyh|x|*yfb%4G^IPykAe$n9WX1Qjdm@QX40zE zxTy=MGlBl#+36A5E40timb{G%bS+2wg^m}x4^ggVrMtuS4L0*SH9_otV)rM@f3)W9 zfIvGv*@;3A6nc=OEv;p^mn;+hKubj!qJKxX>er;)4x^i|ivYm5TWW z&+So|g_b#D^p+zQ++M;qIX2)`y>O2ce7xWj2(xP75&?&j6N4V-^hu&m7JUk77Fu6l zpPd>kn<5N$n%L9DoAyoGpf80-mVu=8Y` zFXI9lOdmBGSLBm}KGbt_q3DZ5U+gq4Yb$7(2fe%J=2Fp@iN2gPK23bu#IC?pD(|rl zUx`_Dy9#m6u12f_R3ADua0Iy~a5nZB>{{X13BR5=^IdJ_8SsXn+0n>gH;TSV^v$I4 zX{z>jx7sb3N;zrwq;JKny4{92XSX9((qp^Jecg6P&>UI9V0VhXOZ45OnM^0HZ0`xS z)7g8)-Y52cXRA0vJrFFrJQ?gku@8xTm@Ga`8esGYrc#zmp5;d|3&U3sWB3YU9e>dp z$4I?adm?ytJTusn;-3=#GaBkc@?uTeg!ecuOL?180v(fv-U>dt9aUP3V%!Z z+r;r{>gvI1=^ad^v{2PNXo}lc#_90 zgdVjfKM7$e;SfHR@R@|qDKMoHy5YYFmcw!w>`SpC+G>uWoc@R^0rLY$dl?rqk9FJZz6Hk;tt1NPSNwe9=O@p+ca+L4z!wO78_)Yd z;R_01$nm@$e6c}+4^Uo^n}vlhB79NeYFc4r1-h)PIrysQelhWji(i7gu2?yKxYd>n zoVQ>ZY$@SO3tz@@o-xo!9UM4s844dFd|BbkIgVBh+l9*qK7XRoBo{tZ_=?1t%N7i} zUn$s}vnz{TMeM3%xlKzvTdWo^hk!BI>Vnr0ye46`fH}$|j}gQ;(+vxST>=cYmXx)n z45z|^!4P5&eqASU4oqXP5yICMz8-OAr^++$`T_GU5`%3ZctgP(Io!>wxQzo|$>EWL zHxaxkVQnE@yxC@fpWyiB!nY7Uia1vYkC@q(!7l0SR$@ns-I^?uMjM<1)3*tnV<#DG zTj4q3+Y!eH*KZ~C%xsUzv{OGL7W597g?2b%w8IfABkXFWme@s|-9_xKVs|6U9JXN~ zOQ4T9x`)s`h3-X^1#P_Ry?4O8cgbLR!7YMY33COl(8c>+ZGrPvFoP9@w+ruZoR={j zW}Sf_?d4GvK1TRh;>-&6{Hm1#-qO?V65K7g>~Jk#9cz_@~HXiXDc@ zl$ZZ{@{=$Nz2%6}TaGwglN9X8K!5jyj}m&c&|@4$|L(DYHWEcIxzOWow0pffR<*6lV=_$$(oq#wjAbdDoN=Qv{3lW}eKNTBQvX0S(v zJ|^^WqAW#T_VC8b6Tz1}=_kcMCH`sWb$bDwf6oNJiI@7b;-3@$JbA8V4PBo0LcqK$ z!(cB8eo64lgjt4ZtO&+tz7oRA!XdmW;WY`bQ()@WgU18j2$&;880<~KZwY>zFb~Jo zA{r+}dnfQYJu~kLe^2=P#Qi9U@on}&@E0j@B>$oKkHmlMe7ci@F7n_x0*=8x75|y| z&&f0Ic;b7keG%~6LIHm%_$$F*6UGN4g?n+T{05V0`*RpRgMEuxb^8u+&b~)1x>D{# zi*gBX_X>-URgN?u}j3-Bo@#Kh=b_MT{u=NAp)8P#SZzy;p!rXh*RgKF{+c*S1 zZpL6EC2S&LQwsPXGw8}kS2(6qW^VEHH^(eI?THuz$`NY=v43pKU^$46!L|}RTI|+j zbpXN}Vr-j$ze_Ay--2_3wdD~MHuyq%zXk$Zi9nzkX5m3k#CXsXvGxbP0tds$@ik?d4=*uTkLX^}eWaOiJ9@zb zUBJEZ;JV;L3A22v)sh_+?7CiLlf)h__6V{}cuXF>MMnmFr6+up;G+c}<8Y}BhrVM2 ze#YVB1RpQ>1j6_<^;GZ`HltWj9UMTb;p%)Y7 zKWexma7nOT&R#0^GO?GF<<8BHL3GL46~Xfl)1q5eQI$B-{iTtRs3z@Zzs=O@gzpx+blwqBL*6?~uI`w8QNr)=1ew+Aqt>P6M@2QdqcZNzA7BW^T^aG#aWSv?X0 zpQvE4MA!fej`>H7I8sjZv@Q8 zfEnyf!EXtEn=n32yi7nh<2#s6X^-@bzl&Mu{YH%5Z^Vs#iv{>W(1&{BABz4+^v6!~ z31vLJ88lC<4ECw$&qRMt8lR?CJO+hFNxr~z%E|+Rfq#iv7>WZ0T-h7 zO$Z!($6((|_)fz26!?#BJW2XPuq*m{{wVe*u|JdLKPp}JOQ01;e--+h(BFx2TNb+c zFy0?QPwPeUr|7>#|2>6nMRTYLyHQ%l|2RDjW}%IZ7;S9CN}EH8%QkJ$ZB9=odV0|_ zkVg6(L&N$yW8jB7K9lg7h0j8q`&vr@4>!yj@HUvmV6zFHUGN-)xt8eE13e(E6Dr6P9c)dJ>Y)(p0~;57uVNm%;>+C#&FUB=gREwO8h9ZnV>HnlY{ zWF34-rO4-R7;FS)p{%H{@zsvCrgl{N(BjT)GWxTAa%Qg;rZKp?y z-bD1Kr1=l@d2SYH7c+rin+x4S=qREr5Dib+GT>fMcPqi81#e9lA6zkG$OO)=+u%!D z$pf9=7PIi+CSp9eiCDRAFLt2cVf&z$_2S+^^p2u;a+)t2={7DXrOl_-8EhBPyNce8 zG(Nno*U9UN-SH*yD~krd2WFwMjTnt>#Ns(}gWW@W2hV|?43-z)BEFS8t1eEoRow1q zv$hbn@s%w|XqV7I0Uzu~y?Cs-%{uWVW#~j-fg)zrZ4Ba^jYX{5h2b7n3U)o_1i`w* zc8e{O<>p5ZIkH{}dUMZ7CfY<-NpqWysbY_-1W)b4J(uPfhLE<6YD;&Z^2?t6zh(bEhp#vUc zPy&C!@k4~yg&#^>N0%C&#phVB!$R1^x9KDahf6qug3`x>3OF(z8Tcoj{!zk@7JdwI zd@v5Nf+vZ)?O1$CTXZik_v0`N`RkFSl5LMC0r!oVhT(gFQl_eg5AzDa;ezM#9rltssJas|63=#xAb zw~D?^^zEei4{R5^BiN(7B<>V@m)N_V#c|p033iz0;$E@$iM`)hjyQiH*y%kF4~l(A 
z?89W42p;ZvB+w;23y%tYOz7i|=3DHEKpP|);6k4g`ZQ72e-7|R{eLF#pD>fbo)!L_ z@aKs$6KJBm5a>Jtfxam8C7~}niaQvu1p0@guL^xl=<7t8i9DXU>K|`!1iql-Zwh}) z_}j!;MD2wZdneeXyeQrk`<~eM$uenn?cfCULEwBElEFR{{*myHiL+X;T{_M_33_R# zKNbC%=+8;xgL`b~2E>8&3w%jOga7y@{1UU!{6>uCH)3TRqx$WeVBcUi5bRsA---R6 zEGvE|x^wM^fQ!DCKMMXy@Xv&~mKamYp$5MM{*34QSK+@2|D8C~?&!fu>5qVS@wERG z{FmUr39E)+KnU)aHsP?4)^R?^r@<^VzY(MPjaWEFA{X)0+O&Z`h6OU%bi$_>J_B)P zrPNXDvKa&BOLZ7*Cc!fco`o>C4sKlGcsFa%>w9U>CVFu3*gafS@ON+H;DY zOZ42NSy;HXoF~x1j?OD|KB4myWj63SHas-HK+t8Vqq?Z!EwD@JnYcH#)k_-;K z*^?h4d|Bbk5ob-bw#g7 znwiNX?|f&*`oS{|2HQaVhT=CO&;OuXt~QS?MU;H2t7c- zkBSR!2Ysw3ofq9Ax|KAGvD`9-@11B1d=<}kL3q3H4&wOW{fCXwhMoA@OeD*UBbJCmx(j;t)+4gI=h2D0`nLw6K$faq`6ef_gO92 zYdq&YVtd8*k!5XU=REi6als$rIq4TaUi?1fSunW9WNUZd!1*=^2Ad#!KjHfm=O$|D z$Fcl?fIEZ&o+$W0!3Q~9<1^d`2RzK6*XNchFXS-7}qeo3HzdCHdxy-eukL|NQp zis%EsBH&xS{H_#ymEfxhbAf$030)H`pB-edYsFqC_IhW>jT>h-1k18h+#&gD&#K)M^juEgEBZds z_mk!lE1ea4AlTi!o<1n{A+ZmWWy(F>>_&bh=!-ndkBWXw^y5xrSUGw(o(TFEPy0#H zPlHY*F?WgniT|3_hYb0zu~%|;D>p6zbpJb;qN<+chGal z#s`59QrbxUL*X9@|Jd;odWZ2a#3zCC<%|sWsqoK)e@>jEnQ-H(tH4pH_C*-$xbdZo zuVj2pgV{%)RWC<>2Y#@x?YF|e6aGDM{sV{a9|BzyGZ^egp+5=znJ7L@)$V)+Z=kN( zFZh;@CiD9y{S~w7_8a1y{f<~!X~Q^lTsi*{_~%L;>HjJGFX4X^#|H(EqZK;|o3Q37 z{Ud!BY`KR=X#k+Cx`(;6Hn@%p`nf;j<7= zCqZfGiDVOAIB zl`x-#`6+M#J?K`>4tgU`d@<3Bi(Y~>bB|swy~x&<4B<}C{ZbN^maq&3<^~TD^3XUqaK7e%!G;K5 zR`_y`No=y_RP z^cteqB(1#(cNEb_Gc53$ujyLC*A_mUIMcyBY+EPTB|IY|#I7rLJ+jP5dx0 z#_>d>wu*Le2oJh&h=jU?LtVhHwR9^_hlTL9a)dlhl5n_$BPeiJ%VVrzryUveB2FJA z`e@O|kY)ktMI3x8Gx$54KTiDd;!hyY(#Mddf}I%fKrhXc1fMMU6v8Y`pcn*Yrv_f} zv`-U$y6`iI^U4+nw`Tu&C_6Kx6J0t>(%F*Ep)@D{G!2wDCZ(M7rRr{yXu=uMcit*B%Fo3l3Xh1GC7yi;XM>SK3c3bv!l6&)ALxnBa~UZnD3Nw zmz2Ay;DcRoTytlwRza&CU(<@L>&%Zjh{@kw(tkxF-Hvcp}ddeeM}FZCe?>#J_7p*zNQVMIvLZ;n1ROJTyeZA zq4{60K$tPy@xHW~%?doh&XO}t$RjglQ-T2`CfG~R9m{Z1F zGUleiBPmB27kiuW(%n4nJkAr=&Awgdl{KHN`Dt;-wUf(Cay(alpmRJ~+~ zD|r5vlDM?QWhipTo1!#xtb`2??;6kI5P8eWTaF&H$U|!lzpXXamJj1A%xAC_WDJ$D zA`O-Vi;bIAV;)xu={nEe%92)*v??X;dt3}3`M1?V_=1Z-u+=53Az@7l%op2fIBMZn zIfsR@yw`=bWUMV?I1O!8o{73JU=q8=I^oRh*%~2dT{-K~VX39VaU3XZ{qXkjQrke@ zhVnL|$JOa9qR@EG+c<=F&)i4}n@HG{LaIKXoAKVdBF@H}g|xpfWphbeNE$_nOTmtt zPGVbzz?Yjb*j5roOW2wMK1~(eIH;y$);9Q_4h~&j0^4F12JIlmpdG~8)&KEoBpiS_ z>R|hD&vSPNxjV|;iLTm9q#L^gm!FnbR|b*AUF7U4XE!>2Oyzqbb`SnsFStF#?4sm z7E~>aMSYEWWc14DqrsfvR2Jht#)WgbJN#PgSd2sRP3a1DeQ*FcEVnb{hvb1>$lTC|TBz#$Uq5)O5NO-MYm)MJN* zu!?7El7z!09N|LR(a}F~WC;8DW;sg2(GrfKfKL-sZ)ECNd{3FW(3f!>W??`OVhjjE ztgXUdt!QS;(@qTIN?*uHGESCp3Jq?Rl&f?kIW?RE-8oIp>2l7XgHL**t{H>+%XTKd zr;NSg89NKJ@RTfKJSB@*i_t@P&FMX2=Z3@I!eOxUDQmpJIXR38p0NvM zTqNUSH?W^H^WhKNxd`KKH!hWNnT*TbXe)c`=ZY}iSDuioD`i|I<7yf_J)&)p8hw7d z^O_Lf^hI4O@j8juyEyedQng8L2(joj?M8_=NxYdN%Ymoxbg%T5Fi!D2-YVlZ8Mo6& z2Laq!ZB(5*!r9)lcc+}YFZGddw)C}mRL)~^9;c&vz$+LK5$y({fKY41~GF{c)ATGR*OZ0WiCP|inkKBmKlEOrHSAh2fgZr3Maed-zfRMuy* zKBt8bIxNsF-i&c{1^WWuQ|79!e2G~Yr-T^eln^&sZ#p`sP5MnpuX|nlR?>HpzNf_V zsGp;})%QbquekT4yr1O#Opld^&p2FH-q+@kY1@{!J2^%04!zI3)X)p`dM2K-sgm|iwW9NlU8%oVfZaOK`OPPTR zK8?%$w8zbe8EG*s?#zT)cyJdn9^6HoUOdSQ)MkHi->hL3yx3-wHM^`iXtCIEGq5>z z+6@R}xEpiIm`ldoG@{tN=Wd>mdcD}@l{BBE`6+R$rfUrz;8M5J0^u$13mYhJL3s<& z<16;kZt?%RB?g5)(HFb0^hKmEN_~Fl{}#P(qB;E8++R%o;_{cEKL~!b)^RNRfBh6o zMk3b(Ww51`$kIw=879JY__x?NUJnlaZLdB7u0R;9;n90t2ERtsqYmxN%eOIkzHnv{54@jj1qQ7|m5 zR^P&F$y!_1a9UHcIHliUo$z`*izDQ%D{nn|3t(ws{JZmD{Yc;r-?kelfen?wMofTp z<9~EK=xn`l`1^PUM#|qr{-*TV_sh$x#Qwy+jiqiA<|1Cu+se$z+>R!fI(4r?Bj2`txOLC*4sv&tyAxfu zg&H~L{+s$3c$7*9rOiCMyGYzs;%*dqNfpM_{S3Q@d;$;#+e7l6lJ}y_ci#NFgCg}d z>>c({U*Ej!7TK+|x!X;-K*eUA(xcfHdbjHZ>Fv@xs1Kw*^?Q8N#cHF#J0pejJ%yrD 
z7^4)%G6mLbUEn5%yP%cAI~4zAur7Js^2+r7xgYW3wGvjFFD;X0vZ}Or$M2u{{rA;q zEmE26sq`q7UZv8GJmA5YO@zK15<&;-M6E2PZ9vPGj>P z7UpYiPLg@J%p+)WGlijzdt_M6UP4F7I$G8-w79%DNct789UEqgFYh>+$ICo{Cacp_ zm({vTJ~6yAe2FK?J6YZ-^tc!Kf+$V7Ts}4I_1!*A_UW?Epv?-A4pRT`TIkHs`&>Uu z`q|RYq0S2NZ&63sb3=a1H|lwk&zF1wW$nRi(KN2UC^jyRCWk)83+zJa7fHXEItxsf zQjPtO9le)?w}UVCQhArjyPTdj(bRQGmm^n%dA2X{N|{&5yqYFEoYFfIi*`-$3wh~W zEB-q1*OO;Lt$5i|p!Z=0gWV|fCZRVI<&9PRzADD)-4gWNo{?Kc-zNHY(yW`_Nc~34WNc zZVTg`12uajXtpsJ>`~E=iGJMaQoaT2`b5x=dm%n4`YF**ljiDh48}9T-t24mtk~zo zK2Mg{i@59BIu_&Q?1d2em=6SdQNl|SUUmU*-mT!Rop{?)2>Oz7v1;$`Sx1iF}`Ukd$7=+{J9zFobo_D!&hI{U5I@5Fvj zmbc-1yYSL@lOeVN%)%rGt^lqc68$E4RtV;V5{TPU>2_45aa3%vDUR8W9g?2mcR7CVAF}6 zUhE8HwUhQ&vy#mibhDStOrmENJqu}GdZj}vuEuTFFfz}}Y%*q-F$WD~zj0ZZI#mXQ zlV^qyY)(0I$(frDm%@R@=?Z+FFy{5Om{-PpGUlhjOI$n;jN2i4nbrcKOmbzQlm(?M zL?!K4>>R)?u0bIfvxHy^OISq0q7=A{R4a5gXb$C3U&dlm7MHRF6}~2&jX3tU_4UQZf!f;>C5DCjlSdIb!S zE7IV~uv4U2cj#@UP}avk7;I%Jt4LXu3O-l}?;SN-9IIhYs)|isVOGa1yrBs(-q3_t zNA>8#Y-Xhy7S;z^5EiwTthHqgcgqVPtdLILe{#n)}zILjLDm=AM6#r{u_wh zQ0zuzSz>71R{3x}o@WW;AkWiC8Jo!1l!kT`J^Hj+(Es)I-dyw+qDPVDQIcNY z%;BvBj~2W&VP(8+tZftQ&7SdX#pc9rN0!+b!)|ojKInU$-a+(^qIYr{`yEbs+U9FKpmxI5%St!p7JMm!~BI=UPd##NrNNiq(XaRd#nyH*9=barG2*SK($grg-K z;{tbhHCvAjVT7;9aT1P~Z~_Ik7UPK5%*C7-)_1;`lVqJN>l9kt^kv=qKQ-u;eZ5Z; zeY)r~NOLXFFWFt?4-1|d!dafFvm~4?;T#IAwtCOyxdFe3Sqyfb;PVAvFa^fXG))fp zxx_Zvg@P{aa^li66o8WiA#lECiHTmYP|H6Fe<>V2%JBV#b8$oze@Pk#98Zf z6{@$@UK7R{UM$zjxK76PG`N5^JVSs5+z|LTOdY{)6n>NNn~8JDm9ZUmOR!@->08C# zCiZr+EWrLeS}u14{GsRkPQiBxzMC*x7U_;fs>=6-a~S@^VE4+oPtN^xcy0HOYi?Yg zX^_SPA)o2FdraXj6N#wF?o;EQwiWy@Iqj`SSt3t{kg$r$WK8869rnFhNn*mIr^Xj9hWm2gLR z&R&)Kn%vjva%qhNAv#yz2xG7>?M)eP$#|OvTR%7l@?n|A678MPj`0P(EA2gL?^EM; zZ7fJXMEyZHi~FKJl=G3CkLmCnfQFVHtNSGQ%P^P0J{A9&_|JpKahb2C`Xcy=3CGwt z@n4DmnmiZGHu5*YuIYLCR_u3TzbA{2_X(+~Y(HR5I>v74(vO&hpH4uGpH4um_84w! z)%g4)mh(#}{a#MLO8HI7?^L)SCdK{;e4OKd3ja&^-^AJC(;a(tySEO5CHFHq?tMrz+E8lUp*@Wg)b<4A>v9NPwQ}8)u7-v_v9BAzliun z$us%RB7XR}+nNLK@WNb7_~OEsAU5xgp4CO!teI(!4q zYQbOb{OaP@5WgmQwQtdy8)w6UzR~HmM6WG+IBBNdg|~Z^+gfa$;OF<9V1)Q}#ji)6 zWq@OgtsiW&7t01>Hx#=OS+x_5Z(ZFu=w-Z6MvC4<^rlWLC$?G8^Lait7rlk(QKXsg z4*YCH1<#Uf8T`(^o?D3@Eq-hAtT5>{va;>NILD10 zWb7zoCmK9DsH-w5+$GcgcM|@=V7o}!RmyHuczACdryECk+dZ`9yjb^;wx_hcsNqTez$H0t<56<#y2Js&SZLqcl20xy_ZL zlrd7qx`Oia)hdN@rsuFrO1G3U6&5*9t?Bxy62>}S2QnEZqe^4SUfrxm{c7PX=^5*h z(<`Sh99~p3R%To{v%AwTXS|$!=u9a)6gf^9{JmWMGUUGDuHfzjx%WQSO0q52CB}!>u594i2Zomv@Mqx|~Dl;NyJ^jWs+BvrfUkbvHAcwRW{>5O&NjhHA36ylx=-*4^ z?}wZi@~XbXlO&%k`4q}rVq?ciPfVX0(uyvfCh2rZXHeoTkt9X4tvT6ehP|5GXURTW z_Bph<5AX+PG4z!^BW&_?)z?v-|*wEL-PL8xzO1l0p!?CuMCP{uyMQ8|yvd7REvwT4&NPlWQQ*P17#JSF96DpPfrOgRBP6XLSI ztY;-YC-HfTyn{8>?fOO)dLhisy)0go`I5|+Y4VARDW|SDXulHb$zCY0N_|c0>r|)g zRIYjTcq8oPJRj+iXlHv_eJbrUX`fSLm)lg%KTil>L;`d8I)AAIzET2TGXcJu zENz>AxL$1EL=reTQf0Xurl)&$v!2c?8ohkl^1lIL3|5FM4r3C(F0vuVD)_tlycIq=n6ILx9x>oUAOoLgt zrb3KsD#Th1?;2~ATBK&vMgrG*IZUSnrdI+pcmn@(HF)}&F%nqMS7Rn6FtZYvg$Zz! 
z$p1X5YD}_0j0t$npF&o)$Mt8t~us+8B7;H{ibIF>U7WeQeCls$S z^MqOTRi0Pod@|>!iOZZ2>E%u7XmyK4h63qP%a7(cCnc&h#y7>%R&pzwzHCR$kD zBJvic$Mdrb%%(MmbgD0GF-eO{T7uG4hmzQy%{rbh8TQh?$faa2EqfW-tla-Pltd~v zI1)I{H^~qsu&feTjtNY4+~qMSofno5aS31Y3KEA(T#+J=@&2<5XcVp#$`Jg6!B&>C zij-BU@WNb5rGoDD}T z4fja#kX;YEb!t7`woYh2_|is5TUXk8)OdVqG<5z)9k=x(iO0MsH&7BADv6Dlgjxz5 zmD@N6Z5+;ZzSxm+Hj%R_9o|3qAMIp6tJr3d#!J53&6UO$N@En$U~6M+4?6|WNwH-J zkNQHlk}z7r))bggTm+#Rm^wDL3FjH#+}p~@$=Qw$`!E}0Z6kBrhj+SrJILEn-cIzm z&Bu*vMvnrwOxk$wJH3nOT}AIkT9@VMUCZOLeD~o0@HN>({GQ_XBG2Xwe_$tF2<;ul zoL*RY87(qeX)r&%e7%&l1^u|^ry#mrbO&h`E#A>rz>wjhb%t;=W;0k(!Wao-DX?g{ zJ&li`qv=x$=Q+<(mz-`nWjfpir+R!KiIoty;*t<7lV}pF6m`7evo!w0thJCX@FMGx z)GMiv5?6`GVD@I)xDY3F9T~LxCmMrFZ@98}vY5nF*rz6TLra763-$VEy?n z^qL(I&Zb_<6XhHz=O8-l7sBi3yX@eA3!eW&1lI*0>ac42VF5qv@Fc;93qFD{yFV&; zu@L%@jtqQP&-_usj~0FmaptfazY270utS_ZPVDhwPaw;xhjQeT1vsso7{+Iq&tNCX zI9bLiG*~pgpVB!soX30xPLp%GoHOX~jG3OmMY~16`g3Mjn{zP;c9yKOWt~HdODebG zZ7t2|`KNQkS>4y?JUQpfxquEgGj43-RjZSOJ^=F=>_X8OiN2ULkN#{sH%7Z$63S^3 zpj;~DGAWl+`By`|VO|mD7G4Nf%DhVE)ihI`;*&qBS$0i0+G5P$wQ{bLb3Gj%Nh+K+EKeb_Bwo$59BR#~^nx}6q2cse+pW*S4z@4&otn7hoi zJ29(ncOlN%-H4_6SsR@Je5=+yp&jLUyjR+N((b3m{XOA0xAFaI4}^4xOAktVNYcZU zxTQolYZ%ERpsMlvscbzHz1aOjuXp ze+>34X5r_#5##5%5l^*g(;&DPFf*0jj=rQ9<-8>4Wjf5BXdl)6N=Q$59eP#LYm#24 z#DlDkNNV9Uw$&RU-o@-8*qajHlK3`7U93(0s7Z6G9`A(P?q&b3-1p?ZPnUx_|52%? zv`Ib){WM?E52b%3{bTCvEc$0trjhSYLjKWL_*2QBN&cKNm;KMG8TVnn2>o(j_LtJX zlKwSyHcQjkU0(t3)chvw4qx$aWq&98d)ia4_`fRp522so3;$92Ptt#;uDh*`8-V|) z@V|t7v@iQt$-hbdoidLzQ%kAd{}E>0%|B)SCG&5Z>=2l;t2BGI|{esdt&iE--DV54t*?)aj+pK$R8jA3IKCl+=vjUJZi5X2L8y2Zb2VK_OPF zOkEi4fNV~?(5(1J%I>L2!!0m*v&)-<9@jJt7yieNG$7QSJiBvBolEN6R9U=JOX-SX zo-m*E8ac1b`DD)TW}~M@HwV*GWebG4m74=)E+}&$n%sG(D2=im6yl{WE-Y~oiHlNX zW~b`Bg2^FG&0${WtGSrW#bqwxW-6hnTQf_Bxr>`i$y{3IGBkBXHKhfV%4cwho4Ghd z;<6H#qsZf0qa}x93|`E*d`PR~UktW_q@j{lq{J2ueghlhc5S7=U+@B1S@Wd>4EZi(0BXLUiEd_LE4VecB00nsD`rOGN|9U z^t2$(XRuvl>?&h78r-feMgGWd(RL4h2hrg75WlDRy~uOBvPyCX!uuTe4&^V;R9;Gp zlvXON&7zwdZ;)vVX=5*df~0my9hBJ8OKYO$t96FM957f>&KNmk>2M#{!QmhKPASaE zp2aSi-7?EGc@&&lYOGx))HS$71k0qF)GAdjbV`BvD%QgM+ZWm+vsY#xP2E*vDK;c; z*N+Q#q!&-W-0^bvarYlr1C7CW`-Z!P@{1f#kh`DU{pm8tQ;m9RP6d2Gn3LR`DDyy> z2e~=bMrmAA92_P$8iO4ovo7;cn*0w>PhQmy3+GrFa3;w)T+R`6xKXC+&v*66P&f6W zJxc1)QjejEkJsv0cUNC_eFPK2A-LA15-YMq~+ngBM&aRy#?PO`E zP)jH7Si4x{so@^y?rCyQmwN_XHjsRg{__6Xl i!RHIUfH1cLeqANGvyrqM3| literal 0 HcmV?d00001 diff --git a/augur/tasks/data_analysis/clustering_worker/vocabulary_count b/augur/tasks/data_analysis/clustering_worker/vocabulary_count new file mode 100755 index 0000000000000000000000000000000000000000..c194ea584214104b489b0767448eb7e489afb149 GIT binary patch literal 45614 zcmYM-1)Lkz_crkD{|9Ypac^-bP^87(ol+Y|&h{n6juhP>0!=BzADIcq>~Xmo2$7-}lzLva#v^SM@?H)ykI6 zWy`2iYDKG;urA9cQ+G42m5tA3%jvI_Djle{<&(JF3%WupTQQfdq`%VWtz|2x|3A!2 zu}UjjHJ7cn^Kg_R)kbfYt)9>w{%6)`WozcLwNz#GqV*LmTRV}tJ)d=2*}A!GJ?#Zn z$ktE)eV^yBK`YxZmu;kC^yMwvIQ{=E9==H{+ccMLruxkHX0>ed1QujvV#+48vMqAi zMAb&2TB#HZjcm(gu1lmX+p3jK%4J)tm@xHbZ?;V$H#^z3mF05TcB-&G?H=1FahI25 zhgP;@F54+pSfQ5foc@2%|9_WOwreh%oc2Sjb~FpwZi$@hE4zCu+as6lnX0Q{rAk+} zS0cN3F8Nm0p34f_RU4(=V!e^a#_34v5Ld-k)|tz?RN9VWq12IeC-H`_PpOqn$z?sN zw`K*qayfxZJd55|R>@^m)mulYUMuGZvsyAgdnQ>cvs_kJCXH&foHY_S#xrTQvc6o_ zuZ>%+X9MZKck%TdY-Lk(+1~mam13j6YCYLL$*k{Y-&VF?F56$Lf-+zaIv|NJd`~^F zl^vAJ4puI`RjfOOFT~WG*{OW$kItj!ysm zy8o49TG_N*cC7xpw>>*9{r5cn2aa!LC*-mdRZJ`PRvXz#Nxbdiqg52H8?#X5MrWMXxS9V_lpLibkx3ULv*@McXqgX3f z2eXF~`N(Va;a2uYE_+mUlE>#rBYP}~IbA&7%AUw&PipIT=d-8MfA8ae0v0$a_r6iVe@p3DBC6~RL_O5#NTKex*yeeOBWpCuN zH&vB|S~Ghq{r|&WytiA~JGty#RYJ3lPq_CIxYldw{Z{rtF8ffRyVTX4eU!j)UVx8V 
z*(bT|(^SOXTBDSGmc-mHK5u1TGrtDJq8M9szb zt?Y+f_M=3jTEoZAPf1Mkg8tmfe#vFOrv0kaW%)+-TOx~kW&GaC{>Wv2>NqJ56uR>` zP5zb4GX97DZe{=EvVXO{?bvaw4gZtOp8r*v4Z)(8{U6et{ST@BX3dI?M#-iNe#(D; z)}|L9Dn5*yAK|_EO38)?p2_hH!Xt!7603bwtk%jlW9W+OnWSfy=Dx2(uN`fbTGbjh zYXnU2qR*y)*%dGc1DL%P+w*msGxVdL{an)AkU5)&S|!IR$mR{atm9F_qlM=q<_CPu zs$EvB*KPg?xWEGzP{4u;Scm~^X!Yg`-KB~x9DKO*MZ|d$$k}4f>sd2j@3t|)H~epZ zu*Jof5MPpDq}X_K;4oQ%g)%a%r(Gwus2OR3T*Vh>mr?-RcbbK>JL)3WhM zbG95(@k-Tt^JQBe?-SqDi@t*RisCDgvyg>y$yN?~a>}{QR*_v*b~RcyQ9JUDJh##6 z;h%TEhWwiHYtge7JM*P7R&?#)n_!NUt%I4Ct&22g>mgOO6pWR)KHjE+Kay~pZ6Lj& z^hU06YA^KI#-UH}D(4?**{0H)QM2ec$~wwLv(3Zrk2y{@0W&Sz0%^`BBGn#Ks`R04 zf&+Lb4D*q=3tkZ?kO`kW;{R4B$>s@8-d= zedtBJ{5weRD7_OkcXxb1^%}oaq^$RGyo>Oz!jp;9k%Y#V?H2kg-vf4+-a~p%*X?Ym z*56ofnw3#+0`sJtvhH-837c@8nEx@Q+HQ@o&tQ&|H8In&KBPJ0Cx9~U&Rd5K;9bi2 zAj}C5Vy0zNk>+e~q$+c{)Qe*5gSScliaF_hG1IdBkmihgnTpXlSh54~CRx^z?17kR z8UIku4o0furn{KO$;}SI`@~zA6F(F)E#upq9fmYDw(z~Q)LF8_u^{`r0`e!-gmP6 zDe|Y%>r7iRtD2n_d@1Lri=QEWCOQ9Zt<+&>g`Mj5Y}s>U&vnc5!+Bvx_&T01dx7kQ zwEWuKUhK~Im2d#tMG>*HM_jCkOB8V_BUtcyvsSCt?Xuugyk0IBze4;VGcDseF=zK8-I?`^&)ME$ zqr2Fw+kIG;a=*|Ef4@Q=P{@M}VKo*9)JS+J`0AMBWDjGeWse}u*`r9s3*9&{O7c6A~9=fa=v74y9O3-T}0 zv*^}D#rN7v!GH7YUlxBw{8e%t8F|a2cI>tA1^2JZzajr7J^yY`anRlh%f+1RZOpXn z9i%yX7paQUUo01@z4jj7r)qi9Oa8w22jU+(uNSOTYuHD@r+5Cb_$T6@lJnr`C>g%p zR8jArMa1_i4a)PmBEC?>myFQ)91VrCeHD6B&;M)bZ=}DaW<}x)TB%yG?}E?hnSU?- zgZPi++^G2`%JNg-bshgK{EIM8#%ZIXlYxicZ{ZL2vivUphy0)PspNxbC;Sz9PuG7- z|0De`HA~)${k_t#Hf+dL@5}in9fBFO&XJ;Zj#S47&PbJNf4SJvWz%6<3K{9;pI#wD z6*7z={0klUmW*=&+nQIF)24FUefubBnV_LFbu#JI*xNvNmr7aN}{Z zQ3@EXfcY4}XgzS>EEM#$@Sg^5y$NP%%vkC2U;bk0mqT9##{Ug=NUXI5Jj~C`{p`0sd zRoe1_*TozsTLCjIV}DQ1RzjNgMSj_~mGL%ZJ)akvU#wfk9@Cty78=d|vaKF^yz4cj z*OXq1TE|=sC+j|2JNPoGBxsw9vt($Slk>3XMTcM6*sGO_!qwtr8(^ko8zRlwMo5); zZLraeO~bCC#Bao$_$HV^;~Z(uHbbgS(^;+B=6I9rZeD>CWVeu=NXvqtvoCAfmZ2~9 zwcJX2lJwTDJBrwAZJW^Vq+*~+E}fI!&NVuM3f;DSXzu!)YzNG=Y)7Oy+X-pfI_Qlr z8b2aaxmWd)uz#>+yGl={<}PLVQr&h7d?)5O+3uKW8N2dxwkJ~6UwyFB$Pd_Fc%SmF zdER;Pc5!}u=*Z!bfbTcf5&rCyJC1MpPWdi+p3h78s$x8)TX#gTjl;=GikPB^9!9YA zxcE`Aa^Qb4$H{sz)3OTEoK=ykIQ3$e&WbfGNX4Jgi=WAxe4U<~6yLR~J=O@#<^(5e zO7}_kQ!^X%k=a1l=02h&WUcEkzv02>yXux!Hxi@$lfxmy;cV znU)=iG-pR4P3J>=TCnAPG!~@nH&49Hj**`xe=I#-ZKawWhkp}g^NW)mk3}sz0cp-o zMEc*(h;Mu+1-=J!!Y5;-^x9YQY@-MM z6LZS^Y|OOm9Hcor7pXF5*DJc~>^v+;nXi#}o1HIzf&7K^%)CEeuh>O_ANC#VV&O}K zFC}J$^E8f+{>y?d=LNr9{0i|aowNTQ=i94-zv8)HEq;yowd5>H4Oa~8y1?9qoa}nc zwCo0?IlB?5*0WG<*6|(QZo-1Jo~I>_R=WHx^0(46b1R}>=C;6ZV2+dBj+vI-fi!14 zhbZ%Uv$L~=FXne)LCU=7CBIvq8yL-Wde#dsWuZ-RU+{UH-!J}v_=DsuOTIrpXb%P6 zm<7RP4-4}sK?9wbd9wkMZ`fnOFAxTQ9E)1^1k#*6i8K|xhRZwn?)nrKq@u6sWqDfu z8Tn`Fd9FYo3))-Hg}&c&e_r|p=@+Tlq^LIw1$O(r6u#iOzbyZX{HydVd`Gdtv*K&P zzx3>17w2Zk*_-6d9$ku!JUVgS3jYP>IN95nK`R|8TIopB&Ra$I!h3j^*7QX$%lpC~ z2!H6fp2vs(M}hywoO1pcGcEfBY0f@Hs+{ZHIQl=syOi_7p7ZCzELG0FB-R&^qM^;w zYhQ)usau(UjhU8xgEVK~B2}$$HSu}z9p0zxnUnbUm}%J$NOSfhQt=w91RWzk;eFyI z%!&VunU?*6G-tme)fTN53ivYm8{Q^833JlFW2R+)Ak7(1BFem2LFYxm{=)mj=kr6F z71XkS#QzOGfGfx)YeRh`zLN7HScLXDQnb&JDtCT8!+|kf@I{c33d1i*EgLF6jGQ}T zdl7rP4G+CT!f2XHkB}Zo&7G@RDRW=sXJIPKWtih+GhqfzbEG+A?}|R>+VdSYE8ZkK zF;O(lWoMV2gO*vhTNNJ_HfQiD&gT;6)oS#elXC|sRLf=V=kta?!~H1v(ei9=DtojP zJMpDr{@^?{bFu}*7ZhKJoaYOUoG|W!spxl$0<(v+Ws3?gMyxtin`=zyZCo!dy@d3V z)Nyt}*Lm>soi8Q6wD>aQEJ=H{+OTB<7afliW&;@==fuprzdPS3_Sy2mpT-;~TLCj@ zoFhf!9H|Q4s5a2ezB1mX)jSq+(yL$wjdP@EoFi39&~;w4)$uOj7m{tWHH3N2%NaX8 zh09fJ`?Uk}%cyc*M|fS~^@v&Q4)#-5Z2jP0B#oxII4?z@X->|qme1S9LI0C&wuvae zrK4F+iWhS3v&}J+iu|=_I{}N(Uyc<0yYp` z#~g-$mbvg`V(x+EQl&Xy>;p@hJq4U>cj-N(*$t5T`O!>NZ^d2_@L2L_;wzwC0sNrR zdg6*4&WP3#ob4`7#t-S1b&7Y9bC>Pm)xPe)>;U3qC1Lh4=B&qYnR`_^Fgt97dxa~) 
zRbn3X*iq2!({B7Ok=FFn#M>;BH~G4IT;Hs)Tc#0y2QPV3o*N9E=k9y+J@~d&wt?_7 zx*wFED!(^9cK}`2Li0R0dmlO3zT*3d@9(^`s@{PEf=_mSp!h-J2b1H4YixaX2xiin zF6{PDEJ6o5QgomrRY{udXjC-q@ZjfRj*}gM88pn1qG66yo2ZU9iye)3Deu)WCwvTM zS~d-7&UiLR-L2it2Cn;8>^Lk*0X+99;CRfSZH^RebEMiGd$Bt<>?FKRdaPIC$Te~he8)=;Edhr{?ZzR`8V$mA> zl5*R(fWHPutsq&f}|kyXYOFcam}wVb7}AU4dEqoa}DQpa&f(n&wDT zu~CU=Q{0F5sT$dU7r$Tp0r3aPS!}gGn>BkV{8pHge;709K1YhyIa2Lh9Yu^mvd8c? z<^FEMXq-zwA^jvZUT8kI+f$fHb}uf%WKUzJWzQf*=Q&cXW!_LF&*5Fd%O%@p&kMgG z{30=4ZH3+rdkHhip6>Q#ENa;+NOSfoQe|8$;6mwZfk${NwM~KlZKQ z9bSEJi@ziOuJdX;zY5uV!MAk&zW4{?AChw?#2HHi-|V9ZxXX+4u>w9(z^4r0M@w-4 zU7+?^==EHGF8zh{m#*vGMReeO6?!4eak8&5gH}0Gw91icRr{;WatAui@jg`}D@6Qz z%%D|{6s>Zksz!XQ6zwOxOPH6jgnt(PMfg`@yzog@vEMM0>|I_dzhedW7^yY8IszdluGn#^9rUJ!cZ1S$r1f?f8<7ljp3# ztDgI8;D#LVHYRLW zqHVUg>=Lp|(&E*IuX$w~i@B6BKU+E3Qdop;a-f;1hPMeEMjY}w#7ukLZ;;2j#t1Uw8fF4Esj)J)vD99nK0hhC9c zW3o-8HlyM_Bc}EV!n>iEPUzIJYW6- z`3vb;HP}w6y>d|mu-}lAU95mh6mY2ru>aHW1;j3kfUlBov&$85g#xZ*0C%xEMobm$ zs?ZC1d9IeeM*7;&80(KN^U&I-pDS62?h97M$M+#2*)bLi|Z`ZhtGLQLImeXV10#)AG;A zKTFTTbW|}I%bpAUXToTdOTQrfA~jz4E`i#v-)Jk)lhMCbQa z9D+q?jw3~L9O=#+rh?Xc>gB`bfaxOU78V4PO|O`tiW$ZjUTn@|pT&1^1P+ggdE^l> zgCa&KVk9H@=L;CmIAhpj+|DFBv+OLicwvB7A69i%%%<}1>#I5&7GVerQVd~1x--WV z#Ru0QDs#>V;w2SMHkX3tR?s{Qnh8s5O?)dW;$9QiCJY$ zj5ZI-&OuJLyXYRGdpfPvZLgrb^e&ngZ5J(&vYDw1iG?Q42G$WF-5yd@NT))&7{VIo z0gkV0-NAV|m6Mgkr-=7BXBRX&=gPtF^a|`1uZUO4xq&#IjUxnV;ZO8+&E!qK?w;4e z%f$g}guh6IL2;V$ee(VE+&~=nZ3BT1^}-AaPZi#q7%y~RqkK*G#Uczp zM~dO+NTu<;1Y?8`2)(lBeW3I~(g(X{XVHKi5}JdYIoYAoE$PEtW84Hj$qo*3OVbAlNkCB}wdn_$pZT+PVk)xk&_*dMd&$4ik@?%${B5G zJ1OjZiK3k@dy4F-wA@i}DWr%F%hQ5S=UJaFeuns&|qxx>=K1t$}nzde6znS=>MEv zE_#LNl}_99c2&^L5X;G~7QIIFT2ii34a0)23wyBdQP<1fAbTS%|D%w%n}Rl(CnmdD z^cK-uowgV3wxGErF_cC04$(WE7CP;&pa(g zvZrOAk$sjHFI+Z2`^lceY|42d&-r;QYS{}&bM_+A^nuXCrwzu^zZCvS<&E55mVZV5 zReCnHFr*KI(l}=9wTR$&8&3ARBHmEMn~cD#4M!KcDc{0uD$jvl`nRzN<5!Sk{0dTO z^xxqu@?Pizy!7u&e<1xKwa#YjB>E`usg6Gu{zUjwVjhqE=oiMB^|Ro-V!+8h7ym;1 zOLDw$__B5N6=qYp|L6K^EJ9Z~QgoFg{cle}C4Lu}!&^An_rgC2|L7QFFAMfl;ELz` zv+ys%zY^os#^K!PD#vUp2M6bKvfr@?UFAs8RgP49N;jHoe}z54?ccKh$o}iLhFi(n zP%|mtE4;``>WZwA?Bhm_IzQIi5jygz!jW{>K#d zkB8+w8k}q<*_mZ$p-o>->$rWUV6%qjcqIARxoKcohSk$u7NOLwHQsrI4UC=gv;5|L>1%wwAUWgbk-cEokOtvuQ zQ{L-%-iu%nI>?ctgB+>y?royYFedP7p7-LyO9(GX%qC0|cY3y`%kE<%gcmqD*-{Ew zS|Q6YgkKymETXBeOUp*U3gQSDr-1PaSdIbAsf=s-%ZKHC5u9uV*%f71qUA>7trHmK zY%7P~KpK7(`BmjtqsN|zGfEv}G*%DI0cV_S4e2$d*P`Yool-^D(b|D|9}Op4M|fS~ z^@v#%JPo77n?%+Rf2-Ha2J#!qZ$!^}!N+4KdL0_JaRgk=+%efE3fNQun=yc;$#|FG z=7BlDl9Npk-a>dHaq22#w*Yp7EhB&<3Kg)G0wyV7YX;!OOMC19*#`6f?KQr(+hP%h zuOP+n6{IQ*OK;l;zA|Nv;VZ&B3hzYBs_8eJXmK{=A4-|?0w*WiMSfTL$@F-&q5l)3 zC~Y^)C%%N2Wp^yX=yIeOU5-?oL*l9(wpZ}X*EBEQE?yvKY19nIM{P&=F84+GPWdi+ zyxO|T)pqc1%%{>+yfh^&YS|Q|7{h{8Jg-M5SULFY$b*yhidV#|uVWAIzuhukyOt7mLsgM~Y@RQgMDl;~U!n!8s6?lN~62kodvQ(W2$|lS6{fpEMfc z;w|yR$hlVSxL^73u)DcELiR}6qiECV7d3Nq;GtfQV}z#(A4|;B7)M6uaR(z>7RN;h zuhw(2;}vp(LQZ4|{{y?PofLL4uf&sOPmw*97BAlJ$ouY2!+fg5Azqr(u?WLhkYX4M zQXQ-1e6yna0MCj5-h{@<&Q`!V3OJVmcwrwvPbkKiV?O1-trz`#EW$t*q!`G8R8>=F zEA67dvpT+5_!8ku9aq>pbXnjDUh>O@uMobH7_a|2h3qQKr-EGM{Aw&}*)>RWb}drX zA)YB?*M(ir?e(%Z$lgec7w#4`-qU*%=2OZwvjN zEcETtcSzsqn%|SV?5@zac;0tQ-y?l5HS3BO{O$|8gy(!e7PagFq&a&KsrD@0h*Gbi z^=A)ZSt|NH9`dk49#P1n3}F>?6zs8}FFJi(^a;@?Nx737$A3N*m;;qL+0(+$2tP}V z7e4EHIRfZ8%%@^-oHr+X9*fX8M~cQdQt?zL_EPX$y;5Ele?|ONaux+=QFNCFE_&Xt z3%?=!CNUfG=xC_abqVyXh~U64PWHAU-ciK6j9~VSe7(os3*7Jcec=y;KMY*$sM<$? z4|n{r@F&8b67z(MPcGc%RI$&(U&>-&vd`tekpGgN8DYGmeHHcuU(c^)zmffxmdy*? 
zuvRVDccDi!TTJ%7^bgWMQm1t+_t;N?8(!?6g?|zLm6#vs|J~4yk>wGvlk!IHzboJm z1^meXyxQtzJXZ$obIhmBKhE?28;j6ujugG-NL3g-+rq336_oI%zVU}(5gO=7(LhJ4 z^+Y2IgS4g#J=(LLUV5nXFlts;v1r4C&g2=-AUZ;Hq*Gk>oG~bhrpaa!omq4iQr2Ea zsT22l2L9IZY{Ih(&q2&0bQWyRpcB0abBWF^Iu9wgVS4JtyrCcPj7Le2mY$EArNEHV zhRq-N8qazG;RS^kBG&Gq4zYzpkMgV+kzQ1KF=}RAZ(^?=6S%|ccyZw+gqL(&96&k7 z242C-v6S%A!pk`B&GQb=z_a^4Jx+MM@N$kZBoB9Z2A<@_SV4G2;guZY7%$k$f%ouY ztRlRs@M^?(;RMx*_IS6gjyGwqJMbG`l68dF6<*JA zXTDsw^#gC_CD}lDL*b2xd4A$LrfWJIN5s~i{U(aoR1upof`=n-BEv08Xpl$1YW^oC zC}0Z(Ok@CFZPj)@j-hT_;!RrFt$by-!XgY`L5krkNYmD5zwb7ISMkzpE1VPFj+i@E zXQ^23uD5d4PEfLaf-m7E-&cG;@%_p1(kB{n zw*&AdZKTUt5KML;7U51$q`1=)>3q&SvL7nnl0S@|D~iVr_SoTpe^l1U z{0QMAg^wcU7Q%ILK8htYpLM{=j**@weJnMzZpWiZ?YO`u3Vgiq3Bo55GwWKlAFb+> zLLcX)K3Vz{=~F{nb)aOYg}&VN>C$INpGnQFkIS2lD()k)vm)R!4>(%^=P2M@251|h z#~Rni&kH`(x6%3H7l>a-ju*EPt`yeoBD_i4=r&*1i?OI>mmtmArASpV1$H)G7WiJr zmkVDZd?hinxc*Ok#Dd|PPVAj-*Zm%c;#PHMc`@YTJygqz6j!kbhr=KgLh!feEf^nURN#2+MQKIoBqDCqW{^TVQ#h(1cn73#%j%VS|@ar?OJ z6S7aza<8rDF>>~)z`Ui8lRYi`jPSF>tPmZ{_FU-2z51S)enI+0>QoG~m%`5KIlnCX zitMYj+%oyPhW@=4d^MH=lf5qfhWMM#@hy;dcfS>UL9e^F#orNsmz?FsgVAu}e=qdb z${Lx!Fa3e^htw_z`r{1fp{$#qKM2+Gd_|KoXoF8qb?m&D8)!-Vk6 zT>C0G?{(#5UyFYu{w+E4#%(@b_Fdq;9e*$UgYb{U%o`87&AL==uaE|)_)(j zp8F6iLOUEO+Tlq5s~KFvZ`gFfAM=%+UVNzdFmh(!ZzbHGUa{ffd1C=5n?Zhr{7Cl& z3>B=Pp+95zlM+Xlx%|xXv(PgmUVE7}EN__QWV6Z6E;|RU_5hyw=M4OBvKX==Jh$*X z#4LCn-6MQn$GqW(c)>@>kCva0o_`f%3g-_xqgUYqq6>;HM9Lk2*R=W6mEae9Nfr@b zRD3aV{z*Lb)W(G6{UMxeaoHtgm!!oDJ>QLz24{`MyR!|5B;y#_d3$+O0P%Fj63tTe$Wk^ZXmj$=tfR) z6=>t2GdSHubW_pIoZ|kQ&4bSCJKhA*Ekq}ha+UGSS=%z~hrViC$xf2pnwC}8%?CJc z6Zl{+!?waX;q8c-alKx)?Za;3)w_f2j(qGRI(@m! zXR%rX?^5;kd-hE%LKiqvbb%vPp&NA@2zrLs#-Qj_(Y;Ao5*|<(gA)35-;?&0-cNde zYG%_wzxn}T>z?z0vIof??3Nb?4+*=S=XTB^@bG*R7$-YI{z&SLbg1gG-AUo+@KT&Ce~SF6^ejcb z(@qOIn`eEx=ozACl5+2_Vd*kC4jiSr&F6~Qac);aY)Uqp(=IlzO=?q=Rz!tQ$gRkl< zdbRj9;@6ThpK7;V7jzrX`FhbCL~kVJDs|)OcsGTe=2d;O>@Bjl(sDPgH*oKk-4=Qe zuan!Q?~uNenstJ&Z@eyfSMVROkdxglevkOQ z$F;GCg74-l`LOsS;*XN2vkfczvC!9g=8sE1A^oIlc0KgjQ=vI{m6JU!{fzXp)GP|V z7jUHibHVrZT6tdl1@RZjRfoMLe0_f@^m49WmVQP0RcihRuGHFVVR!Ljzb^ZR?3->` zinqcp?iKR3>^rjWy6tKX+IwO5_I%%${Xq6ZT5cFT(Z9z&3VgH|_+#Nugg+%_Z4~f$ z9^7~PS@5T^kdu8b{)PCLPG?h2=v~IN9&Af5`qx%T?v0 ztuYw?ui)&_<79t}|0Dh{IkT?dDRFHesnnZ$)#`SyjVXTh#X&;=t?X0=z?jl2r;vhA`3T5eDDW~3qD;a_uKl<$=9qUSmqzWJhauRHj&&P(D` z#CyoOg)r2))K@~2x*P#lc-eauP*FgY0X#;l_;HC|tA*alv(KbWx=zjiz@^hh(0#Fh zlQl*AMEglugBrO$5co*XcTjk$@ZQ9%6|}@~RSJFH;ZJkFul#=U`_t=OF~BF#*#W`F z`AQxrevtUVqDhm(ucX`PI zfJ=v+76DUy1x{DM845U)0qG3WhbKki8HeHdFl_ySv*pi`KbM|=83VBGys%p(+Ggj= zULboREi0a9qh7lxbi0@2V(CkyFQw)#Go_AG%VmLI^kQ5te1-6pj=R|&x+?G~j;|KJ zM)+D{mLuQ6X4!S2XYo>BFMWgbjnu3{oN+tYU1~Q)z~dfpGZta^2~rF{L8|QeqfT|Z z4R6!A-c7DJ31L6;o>kQq% z{o1^X=Aj6f=)23q3V1{Tk1{~{@Tl{ zUMykse@nk1{U$X_f+179=kTrIlfB??i@ziOE;;`LL&@I@y1$p>ebEm@KXlqxw~vDI zi8h?)-`_p z1U)0)g+7HP!DQb{{~-M%HM8l=XZBOr&3(iCEc=V>ue2;UF4g1G4o-8wML>rK{H}mM z6!0elm^m6z_E*^5v4oTTE&Gq`zix3Rw>AvSO9#*OzKTPz2yJqtXpOs>XAm7BI+BzX&6}Z$He=}Hy%aM^&n!I)HUESCM6-sy z*=u4p+1X|1aEnh^n=|Z=UW&P7=a!wvE$*GMdBdLJ`HqquEjyoE-mE@<*af`23&<`g zyAUn^ATC%f9CRzsb`jAOh#+Uv^)ofiu@*$Sd7imv1o<49HxI!F+h1Vlyb>#8npuwrOe(;5`kdtj7 zzM=R=?F^66WL8=H=||NyaIv-(&oYc&x<`ld<*f3c^8ahk zd@Jcm(p$UcSJwgCCiI5Bi*75OlirS6Rfwm4@;Q3jhacr-*+G6s`JL$bA9yr@acpEd zxWB^^PPU8euCkM9(;m``R(If&z1X`8?;*S=G3x|3N8)s6dj)^r%bgc*7cY=A@2(~X zngpKec^8E{g}aEgXJa@#1_^ctU)Om_e2RDvIrB!RnOQk7e-nh0^$J&ntHj(mz4<9P zsn7gx}TbrJr&Q*9SF+@cX6^o*{QO7({e-NX`;Nq zXZwWjkcQt^en0vB=~?bh3=PH;QbHHKBnL_#Bz>@J-X@Qs-l3bW50!37A4bgztoGo! 
zL5ByTt4)r7w}b)HR-6gTv;s(C2tTE|*s;3s;~ zzZU;S{9AHnjxHxY7$x-muD_T5LHb8(ZU20YKTh>i@BtPZll?6Ii}xsmzvKC(UV1ZOF7SPZFu@g`aIsxOFsmQ z(AY+b#x_#@i8LH#y3jj%fu@%pDm{#v=PZ1=DckVCGkDfB2#*jRNzBqW^oZ{nLm%l` z&m=vw^eoi+-h`_)C``p>jes$p{cH-DT>*12fH_ssXEtZpt=-NgJGbmSwA@D3_9=RN zc<_nNM~ROXpO2hniCZA&kAS=vegOq6sDOnS!2ES@U570kd~M%Si-<2OzL@iTf3c2h zH)Dctf;>3c;^Iq)FGB3(ZG)n4!w#BYGC3!RMN89C$z9 zn41W1D!dtSIx*rNQk;u64}Y?+C6yJ$lRfs3s@Uchfz+Wp-G{j|hm7Pq> zeY{!wuWx;~2)N3tWp@Sap@2OZkXDm78|@YPV9!1;-7Z~lT||##(>g*g=>;!JcS?6r zb3HLilpTZJ!5d!VCGjcZJ0*CIICzM}hy?oY}* zrZg*dK-ibP_70RiNcLb_)*jj`&0e&`gP+BGG1;NwE%C#g^OTQ)Er$o+-V1$%_>tmA zk@G+5=#4o#?8a`7k)0-cEG@cvP!jaV=&q#WBH}KV1d|=Fh!YfXA|td{;Yh36Nul>k z7!N^_K1KReYE~C6dYu+@Hm{V^Mb8jDlaw1~N(n=3&IaXB|Y=T zW3ue3u#b9$UoCr$?6tI9%QDAjTo-tlXMDZz4Z=4Pv#!yI^sUtV zn*##_c3apFycoC3-XVJ@t+qQxD)T9#dAlnj29XCRyIT?WDB@m5aG$NSQyNbx48DT@ zf&0ZD5Py)IMMvL0u36oU9;!H4tOD+mwiF@MOxUm3&`1nsThiz;hm{qd-ia6Qy!b63J5i@ICLGLNq@X&Y2LeC&QLV6@M_mxsbZQmJ#U+3kX zNqlDUS)6y|@o-AC!h=8Sxz8p(yZ9W=aoaEN*_ku=BF^U$pIdw$=Vgqd!6)~;!N2#y zj1nI$J|8*HD;TEPh%YL>7&-p~ zZRIgRH}s+}F1m#1lBC)fEBTt9sx>wOmiK*eDFrO8fMpoKb2IPpvSkBb^qWDVWEDXl<=vR?#b|YG@Bp>^Olm5oRXK=oW z_@?5Uk+aPGCG6Y%M^1mP`&Cld3ZNRKYuGIR;cIN4UxlccwHjfeB#YtJ^Jk53q* zP^5Fx+fk=)1b9k1j{WV!U+R0+4)Qz7??lh7gL`H2`28dd0FQu$JYW|E?5cpt4B(n} zl`*1wx4^x=b#@otLwHYOmWI!gz(YT5ukaUn&E)0Vz)(Ag`m!~P;C*M!c>~SwE+Z6-B4{|;zK2>~ga;|K@hA!g0_`%L~=SkfT34V$4M(&4-x5N)4*D|!Y+#?oa}5Y!q06Y z#m{XbO~okk7jDnP+w?KIjThs5=?kPUq~_X8!Tpy(UvPS{=p~|;lCs2TZs9Q~fuCX# zFxll;gi+y0F)AFXN?b2yb`{>FGR*F4ceU&_ve(kGo^iWlBj01!1z+0h`Fimi#BX%Y z2PdKBd{gixRSuNoX7O9ZZzX53%k12{E%1lFj<*ZnA$%t>4<4RuIdb-{@cg+%PIkBa zJ@WU`bDNj&1F>k6hd$qPzhC+R=?AIR=*Oe`N)7Df_E1Eequi1I!-{xB5sxy0e>h*Y z$AXS=`nc#5qEC{tdih}7r^3GO_G#H?WS^yFwq0nH2hF_9&x^hw`XVVejur73zKXpR z{5`Lbm&IQZf7N+w}b@4aE-*k=*&wJ_L3cii+DQ}CvBmOQqw`E@y*DK!( ze4pd@g+CDfkeL6`Y;@X3VQ+N%vFsehA?kUy}OSIwsf^-CL@0kz5 zA~eR4qA`wCHB!M=?6c`YujBj3^wLA6hf%XAz5HdH;enTNJcIBE;gOEf(SQdO2EJCM zM!9Dao>_PnV%BxDrr%YZHTacYf3u0tE(W~^lShsfItsSo;ysq$ij`0<)YU>AH$+zkT!W#;2M9lvv z)NSLSPk5m>5#3aDGg5A#-YQ1?+vcIidBzi@w~(Gl%_8FqaGz}%crM==w-TNtyfrba z54eOD$2P&|biS>4PJBCZ=H1N?!tDd^>(#M?@Q%Ve5#yDv`LnA`KXS!I={&Hx|A}3& z2*0z5G-s2M>QAU`yIbI=d|h`J-a~j#ViiAqAKNSVWY0S<-Y#As*I5_iW_zq7^lGk) z(w)*>)LdEaTGkzSe=Oo;CE+Q;J;XYQa5rhg%AsdY80~TCigeX=quRtFR||cd7bKH5 z>ALH3z8%}Y5&8qK_NH{7bU!t-VH0a0>@}YApzKuHy=hs0)~rkPIZ*r{@q@{Ero-T>LJzvZ?2rgJ+^gkK1+)}!m2u zWdwID+|i$YMfkP|SW_GUw=3Wd1>DI1mJ>e^Xm^D@(~EPr>^-vg(&{9EQ7c8eFZ8m$ zqW4QbApIbI!{U#KKT6J$_vS05PX1ERV-Ya32RyETClv4`1GpRN zB8lNv74$qm6)`vY+CHtAXB6`+V^}8qzA~@rKNr02{CV*g#9t(5nTiAG250^+MZof2 zLoX}f6$QM?0OsFa)YYcff*Wz;_s5Pss}MvuY+mB^Gk^PjG+qHMFo|Wyh&}(=pK9~MN`b%nNGbNAadDw@2 z1;3X4M)q4;eJ5kT7VgKw@Rf)-5qWU3?-lWbB7S6q%Az4%KZTyl%ks1IFVeqK^C-oj zA^fOM;By@RF8qh^pTsJ7N00p#_6aZe-?IP6{_B>{ziz`PRr*|>Lj^{WhhP!f=Sa~$ zN2(&@+9&^Soz}9w9xFnq{ux>+FnS5BK$(Np@!0 zS!h{xd=5j|W({p#j@hJVm!5-~yDIM1*ZKLctPQXh}rJslPPeXvxUPy>V6UVMdcTx=UV0~_+?8Q z6MA7R<7A6VFCo38>nd-SvazAJ@ETl7dTHrpsCm4n$A;pFTs8tu_F5UIfbj}gjsaX# zJ;QDJz@M-nm}~{%6@^zKW-e3u@!asRulic9BD<>WYP2b1d`zq!_yc8&jMoreQ+O@M z_}-2&*~Y==@?vZvzNz?T z&ik5WHkmdL&R<#PWD~@<5T6(vpAR^-ZW;V`=Ua(S65rZ+2d<*=!X!|v}@vV-i7vOBq*f+zj(w?xvx{*rH)U1WEaolMIOGnmIoc(=g(X=6^d zyYL>udlK`8uk^s2|9{nJuZUaE0=9bpgg!WU&bWxL$g`?{<|wM#|KQiO@rxCQhd#;+a)k7e(nnEq zMbj+HL};zri#Q**yHClgCFG;dA;}z;y03W1=yQVMvwMQ;pg@2Z^C)8$MGSsTj-z=Se4b-JFSDV4ti4?1 zF5oJ`8(v)sZ|0;sk!+&wIuNCx-g1%)Ci^Rd_?0)_(`~!Yed@uil z{EzO@)7Mq)Xt$rjui*Y?`CsIJrDsKBKeXS%uH$w8yX+sbf4c3!1)IOZKF301vcF~j zk^Pre%|zqF0o(8?oIWpBN*GOeEJ70=DVp#|rB!O1F7)zV=INz}N)Myvf3W=?^k=6t zh>j2)NvbVWY~W*S#?Vjs7Me+VX6ad|S=s2GZq{%SX4dcn?q`#qU49OFep>hg{}3`~ 
zgskFgI+sG`R>(XI;c23$+Th*P^9Fy%|H3Ho(c<%wbN9_p#W1k>18?eiFCe_2@Iu5~ z*`9nSe&u4}(64#ci%2gjy%@E=@Z(Z_9=8VBmvC}gETMoU84$Z7T8v}EKj?lb z`K9HTanBz*sScuxJ^Wl=pmFl!<(H#pfqI%egW`<7d<4Aj0V^nAMFp(H0M;c3rQ!Fy zRu2BSmuD66RmE2$XZbniEssNE_3*Q~UqgOP`L*a-_q`?DC%<;!#T>6Aysq$i#H@RK z6GrLR4}F&F4Wu`e-iVr6cUcY3It)DMtGS8rrox*MGwVK%=e5m4-|q+M1nDiLCsOl2 zc+=ySVJ~pImFy(ht!bHauWqv0CiKc)=xwEQ(%VsU`|x4*xGAG<+eg4*Uho|hu%iNY zVu0!dT}lOGOFW$e_VVm^k=|8$GBvkmS5>{By9KX$=DUmUA-*R$s}K)8!ehq+AMH3V z+%8;jtX-=k@VkzS!kxlh#4Jb+zm>?*=fV5E+$HfT;yvVCOa4}I-pZkGbKNUlk*-qf z!&DyteE4uJBCbL%oGep>DWc8@ZoLYgB-jYMiRa#w?UU`NWmzze2{+#kgr3H1G1;K> zRO!8`nN6eJ_6a)Rd&<6|`-$#P$~wW1t#DYv0l~lXDm+m9An}9AnNI;j<_`(`D3)-t zLuFgChtV=?^vB?)N;^FGfu8jd;zx=fMb512=yN_gY>$`v7};sE$I|kM=VmY9p2Oq9 zzvp=$FMopkiS(>KjQPg-_N35F&;4ZSQ>0I&=6`hH{<_n`zJw*5>~z^PWY2V4L#fXS zd%bVOvt`ebJ(rfHZZGkX`XxIr{9eo$lbtVrf&7K^JQ9R23cR*wezEW+!j~d#j%|+r Ef0s=4tpET3 literal 0 HcmV?d00001 diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index 393fd7d6c8..dbb67b01d0 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -56,6 +56,9 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): #Get data as a list of dicts new_repos = session.fetchall_data_from_sql_text(query)#list(cfg.cursor) + session.log_activity('Info', f'SPG new_repos is {new_repos}') + + else: session.update_status(f"Fetching repos with repo group id: {repo_group_id}") session.log_activity('Info',f"Fetching repos with repo group id: {repo_group_id}") @@ -65,6 +68,8 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): query = session.query(Repo).filter('New' in Repo.repo_status, Repo.repo_git == repo_git) result = execute_session_query(query, 'all') + session.log_activity('Info',f'SPG result is {result}') + for repo in result: repo_dict = repo.__dict__ try: @@ -447,8 +452,9 @@ def git_repo_updates(session,repo_git): if return_code == 0: - set_to_analyze = s.sql.text("""UPDATE repo SET repo_status='Analyze' WHERE repo_id=:repo_id and repo_status != 'Empty' + set_to_analyze = s.sql.text("""UPDATE repo SET repo_status='Analyze' WHERE repo_id=:repo_id and repo_status != 'Empty AND repo_id=:repo_id' """).bindparams(repo_id=row['repo_id']) + session.execute_sql(set_to_analyze) update_repo_log(session, row['repo_id'],'Up-to-date') diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py index 096a876809..5f31c753a6 100644 --- a/augur/tasks/start_tasks.py +++ b/augur/tasks/start_tasks.py @@ -173,7 +173,7 @@ def primary_repo_collect_phase(repo_git): ) repo_task_group = group( - repo_info_task, + #repo_info_task, chain(primary_repo_jobs,secondary_repo_jobs,process_contributors.si()), generate_facade_chain(logger,repo_git), collect_releases.si(repo_git) From 55ef054fa529ebda608d122a7bc3869d487ce565 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 10 Feb 2023 15:17:40 -0600 Subject: [PATCH 115/134] raise exception if could not clone Signed-off-by: Isaac Milarsky --- .../git/util/facade_worker/facade_worker/facade05repofetch.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index dbb67b01d0..13e57ca353 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ 
b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -193,6 +193,8 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): session.log_activity('Error',f"Could not clone {git}") + raise Exception(f"Could not clone {git}") + session.log_activity('Info', f"Fetching new repos (complete)") From e7ea83e22e57d998ddc52d68878882f800c3f938 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 10 Feb 2023 15:18:30 -0600 Subject: [PATCH 116/134] Fix for annoying repo_info race condition Signed-off-by: Andrew Brain --- augur/tasks/github/repo_info/core.py | 17 ++--------------- 1 file changed, 2 insertions(+), 15 deletions(-) diff --git a/augur/tasks/github/repo_info/core.py b/augur/tasks/github/repo_info/core.py index de1242a16e..cb62ab1452 100644 --- a/augur/tasks/github/repo_info/core.py +++ b/augur/tasks/github/repo_info/core.py @@ -298,21 +298,8 @@ def repo_info_model(session, repo_orm_obj): else: archived = 0 - current_repo_dict = repo_orm_obj.__dict__ - - #delete irrelevant sqlalchemy metadata - del current_repo_dict['_sa_instance_state'] - - rep_additional_data = { - 'forked_from': forked, - 'repo_archived': archived, - 'repo_archived_date_collected': archived_date_collected - } - - current_repo_dict.update(rep_additional_data) - result = session.insert_data(current_repo_dict, Repo, ['repo_id']) - #result = self.db.execute(self.repo_table.update().where( - # self.repo_table.c.repo_id==repo_id).values(rep_additional_data)) + update_repo_data = s.sql.text("""UPDATE repo SET forked_from=:forked, repo_archived=:archived, repo_archived_date_collected=:archived_date_collected WHERE repo_id=:repo_id""").bindparams(forked=forked, archived=archived, archived_date_collected=archived_date_collected, repo_id=repo_orm_obj.repo_id) + session.execute_sql(update_repo_data) session.logger.info(f"Inserted info for {owner}/{repo}\n") From f04b09cbad56f39c33f57429a4db19c6202b14de Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 10 Feb 2023 16:06:37 -0600 Subject: [PATCH 117/134] fix check_for_repo_update syntax Signed-off-by: Isaac Milarsky --- .../facade_worker/facade05repofetch.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index 13e57ca353..b96b0999eb 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -198,9 +198,7 @@ def git_repo_initialize(session, repo_git,repo_group_id=None): session.log_activity('Info', f"Fetching new repos (complete)") -def check_for_repo_updates(session,repo_git_identifiers): - - +def check_for_repo_updates(session,repo_git): # Check the last time a repo was updated and if it has been longer than the # update_frequency, mark its project for updating during the next analysis. 
@@ -213,13 +211,12 @@ def check_for_repo_updates(session,repo_git_identifiers): get_initialized_repos = s.sql.text("""SELECT repo_id FROM repo WHERE repo_status NOT LIKE 'New%' AND repo_status != 'Delete' AND repo_status != 'Analyze' AND repo_status != 'Empty' - AND repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + AND repo_git = :value""").bindparams(value=repo_git) - repos = session.fetchall_data_from_sql_text(get_initialized_repos)#list(cfg.cursor) - - + #repos = session.fetchall_data_from_sql_text(get_initialized_repos)#list(cfg.cursor) + repo = session.execute_sql(get_initialized_repos).fetchone() - for repo in repos: + if repo: # Figure out which repos have been updated within the waiting period From 7d8f5d4bca14ca84622997c2b01e5edb1e790352 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 10 Feb 2023 16:18:15 -0600 Subject: [PATCH 118/134] typo Signed-off-by: Isaac Milarsky --- .../git/util/facade_worker/facade_worker/facade05repofetch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index b96b0999eb..700ad3baef 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -253,7 +253,7 @@ def check_for_repo_updates(session,repo_git): AND repo.repo_status='Update' AND repo.repo_status != 'Analyze' AND repo.repo_status != 'Empty') - AND repo.repo_git IN :values""").bindparams(values=tuple(repo_git_identifiers)) + AND repo.repo_git = :value""").bindparams(values=repo_git) # ("UPDATE repos r LEFT JOIN repos s ON r.projects_id=s.projects_id " # "SET r.status='Update' WHERE s.status='Update' AND " From b3e6738f2ee11ffd03112a6785e99718dbb6bfb2 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Fri, 10 Feb 2023 16:23:05 -0600 Subject: [PATCH 119/134] typo Signed-off-by: Isaac Milarsky --- .../git/util/facade_worker/facade_worker/facade05repofetch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index 700ad3baef..8b4679ea8d 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -253,7 +253,7 @@ def check_for_repo_updates(session,repo_git): AND repo.repo_status='Update' AND repo.repo_status != 'Analyze' AND repo.repo_status != 'Empty') - AND repo.repo_git = :value""").bindparams(values=repo_git) + AND repo.repo_git = :value""").bindparams(value=repo_git) # ("UPDATE repos r LEFT JOIN repos s ON r.projects_id=s.projects_id " # "SET r.status='Update' WHERE s.status='Update' AND " From f190cb061b346cd14d62021e8867b7a1a2ac071e Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Fri, 10 Feb 2023 16:48:26 -0600 Subject: [PATCH 120/134] Fix clustering indent Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/tasks/data_analysis/clustering_worker/tasks.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/augur/tasks/data_analysis/clustering_worker/tasks.py b/augur/tasks/data_analysis/clustering_worker/tasks.py index f8c2b8b02f..be3d199637 100644 --- a/augur/tasks/data_analysis/clustering_worker/tasks.py +++ 
b/augur/tasks/data_analysis/clustering_worker/tasks.py @@ -185,9 +185,9 @@ def clustering_model(repo_git: str,logger,engine) -> None: session.add(repo_cluster_messages_obj) session.commit() - # result = db.execute(repo_cluster_messages_table.insert().values(record)) - logging.info( - "Primary key inserted into the repo_cluster_messages table: {}".format(repo_cluster_messages_obj.msg_cluster_id)) + # result = db.execute(repo_cluster_messages_table.insert().values(record)) + logging.info( + "Primary key inserted into the repo_cluster_messages table: {}".format(repo_cluster_messages_obj.msg_cluster_id)) try: logger.debug('pickling') lda_model = pickle.load(open("lda_model", "rb")) From e7aa98c696ed448dc3c63b39504873c35611e44c Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Fri, 10 Feb 2023 17:30:25 -0600 Subject: [PATCH 121/134] Fix error when repo_name is empty Signed-off-by: Andrew Brain --- .../git/util/facade_worker/facade_worker/facade05repofetch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py index 8b4679ea8d..48f0bfc346 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade05repofetch.py @@ -310,7 +310,7 @@ def git_repo_updates(session,repo_git): except IndexError: raise Exception(f"Repo git: {repo_git} does not exist or the status is not 'Update'") - if not row["repo_path"] or not row["repo_name"]: + if row["repo_path"] is None or row["repo_name"] is None: raise Exception(f"The repo path or repo name is NULL for repo_id: {row['repo_id']}") session.log_activity('Verbose',f"Attempting to update {row['repo_git']}")#['git']) From 244ec5ca0664d56ad7d62dbc565f0d55a9675047 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Fri, 10 Feb 2023 17:41:04 -0600 Subject: [PATCH 122/134] Update default collection interval to 5 minutes Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- augur/application/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/application/config.py b/augur/application/config.py index b8c254bb36..be5cd3ab9e 100644 --- a/augur/application/config.py +++ b/augur/application/config.py @@ -77,7 +77,7 @@ def get_development_flag(): "connection_string": "amqp://augur:password123@localhost:5672/augur_vhost" }, "Tasks": { - "collection_interval": 600 + "collection_interval": 300 }, "Message_Insights": { "insight_days": 30, From 6f919a25667f3666bfbd303e88c6b83f71063bf3 Mon Sep 17 00:00:00 2001 From: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> Date: Fri, 10 Feb 2023 20:43:21 -0600 Subject: [PATCH 123/134] Fix syntax error Signed-off-by: Andrew Brain <61482022+ABrain7710@users.noreply.github.com> --- .../git/util/facade_worker/facade_worker/facade01config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py index 89aba0f9a5..a405aadcca 100644 --- a/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py +++ b/augur/tasks/git/util/facade_worker/facade_worker/facade01config.py @@ -223,7 +223,7 @@ def insert_or_update_data(self, query, **bind_args)-> None: if isinstance(e.orig, DeadlockDetected): deadlock_detected = True sleep_time = 
random.choice(sleep_time_list) - self.logger.debug(f"Deadlock detected on query {query}...trying again in {round(sleep_time)} seconds: transaction size: {len(data)}") + self.logger.debug(f"Deadlock detected on query {query}...trying again in {round(sleep_time)} seconds") time.sleep(sleep_time) attempts += 1 From c8ef7a9cb6db7ec31a570ef36283e36eb2beefb2 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Sat, 11 Feb 2023 14:34:07 -0600 Subject: [PATCH 124/134] Fix routes.py Signed-off-by: Andrew Brain --- augur/api/view/routes.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/augur/api/view/routes.py b/augur/api/view/routes.py index 10b9d72cb1..de8b9a10ce 100644 --- a/augur/api/view/routes.py +++ b/augur/api/view/routes.py @@ -168,12 +168,12 @@ def user_login(): last_name = request.form.get('last_name') admin = request.form.get('admin') or False - result = User.create_user(username, password, email, first_name, last_name, admin) - if not result[0]: - raise LoginException("An error occurred registering your account") - else: - user = User.get_user(db_session, username) - flash(result[1]["status"]) + result = User.create_user(username, password, email, first_name, last_name, admin) + if not result[0]: + raise LoginException("An error occurred registering your account") + else: + user = User.get_user(db_session, username) + flash(result[1]["status"]) # Log the user in if the password is valid if user.validate(password) and login_user(user, remember = remember): From a5f66f747515528e4795869c9761ab9c9962c5ac Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Sat, 11 Feb 2023 16:37:03 -0600 Subject: [PATCH 125/134] Error fixes Signed-off-by: Andrew Brain --- .../pull_requests/commits_model/core.py | 29 +++++++++---------- .../test_models/test_augur_data/test_repo.py | 1 + 2 files changed, 14 insertions(+), 16 deletions(-) diff --git a/augur/tasks/github/pull_requests/commits_model/core.py b/augur/tasks/github/pull_requests/commits_model/core.py index 6e7d7bb22c..ffa152f6cd 100644 --- a/augur/tasks/github/pull_requests/commits_model/core.py +++ b/augur/tasks/github/pull_requests/commits_model/core.py @@ -24,23 +24,23 @@ def pull_request_commits_model(repo_id,logger): pr_urls = [] #pd.read_sql(pr_number_sql, self.db, params={}) - # TODO: Is this session ever closed? 
- session = GithubTaskSession(logger, engine) - pr_urls = session.fetchall_data_from_sql_text(pr_url_sql)#session.execute_sql(pr_number_sql).fetchall() - - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') + with DatabaseSession(logger, engine) as session: + pr_urls = session.fetchall_data_from_sql_text(pr_url_sql)#session.execute_sql(pr_number_sql).fetchall() + + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') owner, name = get_owner_repo(repo.repo_git) logger.info(f"Getting pull request commits for repo: {repo.repo_git}") - - for index,pr_info in enumerate(pr_urls): - logger.info(f'Querying commits for pull request #{index + 1} of {len(pr_urls)}') - commits_url = pr_info['pr_url'] + '/commits?state=all' + with GithubTaskSession(logger, engine) as session: + + for index,pr_info in enumerate(pr_urls): + logger.info(f'Querying commits for pull request #{index + 1} of {len(pr_urls)}') + + commits_url = pr_info['pr_url'] + '/commits?state=all' - try: #Paginate through the pr commits pr_commits = GithubPaginator(commits_url, session.oauths, logger) @@ -65,11 +65,8 @@ def pull_request_commits_model(repo_id,logger): #Execute bulk upsert pr_commits_natural_keys = [ "pull_request_id", "repo_id", "pr_cmt_sha"] session.insert_data(all_data,PullRequestCommit,pr_commits_natural_keys) - - except Exception as e: - logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") - logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) + + diff --git a/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py b/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py index 0a1bd4ceb2..d1da2bbe10 100644 --- a/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py +++ b/tests/test_applicaton/test_db/test_models/test_augur_data/test_repo.py @@ -23,6 +23,7 @@ def test_parse_github_repo_url(): assert Repo.parse_github_repo_url("https://github.com/chaoss/augur") == ("chaoss", "augur") assert Repo.parse_github_repo_url("https://github.com/chaoss/augur/") == ("chaoss", "augur") assert Repo.parse_github_repo_url("https://github.com/chaoss/augur.git") == ("chaoss", "augur") + assert Repo.parse_github_repo_url("https://github.com/chaoss/.github") == ("chaoss", ".github") def test_parse_github_org_url(): From 82a134d5f4d5dc26d41f0d2bcfdb34a5f46ace26 Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Sun, 12 Feb 2023 21:55:24 -0600 Subject: [PATCH 126/134] Reduce pool size to avoid connection overflow: Signed-off-by: Andrew Brain --- augur/tasks/init/celery_app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index a2ac88f347..3762054903 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -160,7 +160,7 @@ def init_worker(**kwargs): from augur.application.db.engine import DatabaseEngine - engine = DatabaseEngine(pool_size=10, max_overflow=20, pool_timeout=240).engine + engine = DatabaseEngine(pool_size=5, max_overflow=10, pool_timeout=240).engine @worker_process_shutdown.connect From 48a87f782b036ec18a7001145cca7689eeda7d5e Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 13 Feb 2023 11:34:28 -0600 Subject: [PATCH 127/134] Add context manager to analyze_commits_in_parallel Signed-off-by: Andrew Brain --- augur/tasks/git/facade_tasks.py | 118 ++++++++++++++++---------------- 1 file changed, 59 insertions(+), 
59 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index d17617c93c..aec86d11a7 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -258,86 +258,86 @@ def analyze_commits_in_parallel(repo_id, multithreaded: bool)-> None: #create new session for celery thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) # TODO: Is this session ever closed? - session = FacadeSession(logger) - start_date = session.get_setting('start_date') + with FacadeSession(logger) as session: + start_date = session.get_setting('start_date') - session.logger.info(f"Generating sequence for repo {repo_id}") - - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') + session.logger.info(f"Generating sequence for repo {repo_id}") + + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - #Get the huge list of commits to process. - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - # Grab the parents of HEAD + #Get the huge list of commits to process. + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + # Grab the parents of HEAD - parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " - "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], - stdout=subprocess.PIPE, shell=True) + parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " + "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], + stdout=subprocess.PIPE, shell=True) - parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) + parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) - # If there are no commits in the range, we still get a blank entry in - # the set. Remove it, as it messes with the calculations + # If there are no commits in the range, we still get a blank entry in + # the set. 
Remove it, as it messes with the calculations - if '' in parent_commits: - parent_commits.remove('') + if '' in parent_commits: + parent_commits.remove('') - # Grab the existing commits from the database + # Grab the existing commits from the database - existing_commits = set() + existing_commits = set() - find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id - """).bindparams(repo_id=repo_id) + find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id + """).bindparams(repo_id=repo_id) - #session.cfg.cursor.execute(find_existing, (repo[0], )) + #session.cfg.cursor.execute(find_existing, (repo[0], )) - try: - for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): - existing_commits.add(commit['cmt_commit_hash']) - except: - session.log_activity('Info', 'list(cfg.cursor) returned an error') + try: + for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): + existing_commits.add(commit['cmt_commit_hash']) + except: + session.log_activity('Info', 'list(cfg.cursor) returned an error') - # Find missing commits and add them + # Find missing commits and add them - missing_commits = parent_commits - existing_commits + missing_commits = parent_commits - existing_commits - session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") - - queue = [] - if len(missing_commits) > 0: - #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) - - #encode the repo_id with the commit. - commits = [commit for commit in list(missing_commits)] - #Get all missing commits into one large list to split into task pools - queue.extend(commits) - else: - return - - logger.info(f"Got to analysis!") - - for count, commitTuple in enumerate(queue): - quarterQueue = int(len(queue) / 4) + session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") + + queue = [] + if len(missing_commits) > 0: + #session.log_activity('Info','Type of missing_commits: %s' % type(missing_commits)) + + #encode the repo_id with the commit. 
+ commits = [commit for commit in list(missing_commits)] + #Get all missing commits into one large list to split into task pools + queue.extend(commits) + else: + return + + logger.info(f"Got to analysis!") + + for count, commitTuple in enumerate(queue): + quarterQueue = int(len(queue) / 4) - if quarterQueue == 0: - quarterQueue = 1 # prevent division by zero with integer math + if quarterQueue == 0: + quarterQueue = 1 # prevent division by zero with integer math - #Log progress when another quarter of the queue has been processed - if (count + 1) % quarterQueue == 0: - logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") + #Log progress when another quarter of the queue has been processed + if (count + 1) % quarterQueue == 0: + logger.info(f"Progress through current analysis queue is {(count / len(queue)) * 100}%") - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query,'one') + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query,'one') - logger.info(f"Got to analysis!") - - for count, commitTuple in enumerate(queue): + logger.info(f"Got to analysis!") + + for count, commitTuple in enumerate(queue): - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - analyze_commit(session, repo_id, repo_loc, commitTuple) + analyze_commit(session, repo_id, repo_loc, commitTuple) - logger.info("Analysis complete") + logger.info("Analysis complete") return @celery.task From 403a07efc88bfd8c079a406e6c5f1f65a951f456 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 13 Feb 2023 14:27:09 -0600 Subject: [PATCH 128/134] fix pathing of discourse_analysis_task Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/discourse_analysis/tasks.py | 4 ++-- augur/tasks/git/facade_tasks.py | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index ac82d3987b..fa85804348 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -28,8 +28,8 @@ # from os import path stemmer = nltk.stem.snowball.SnowballStemmer("english") - -DISCOURSE_ANALYSIS_DIR = "augur/tasks/data_analysis/discourse_analysis/" +ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))) +DISCOURSE_ANALYSIS_DIR = f"{ROOT_AUGUR_DIRECTORY}/tasks/data_analysis/discourse_analysis/" @celery.task def discourse_analysis_task(): diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index aec86d11a7..11536c740a 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -465,11 +465,7 @@ def generate_analysis_sequence(logger,repo_git): repo_ids = [repo['repo_id'] for repo in repos] repo_id = repo_ids.pop(0) - - #determine amount of celery tasks to run at once in each grouped task load - concurrentTasks = int((-1 * (15/(len(repo_ids)+1))) + 15) - logger.info(f"Scheduling concurrent layers {concurrentTasks} tasks at a time.") - + analysis_sequence.append(facade_analysis_init_facade_task.si()) analysis_sequence.append(grab_comitters.si(repo_id)) From 5e2a00987e26983cd76f31e3c244ad12f05d8ee2 Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 13 Feb 2023 14:28:31 -0600 
Subject: [PATCH 129/134] missing import Signed-off-by: Isaac Milarsky --- augur/tasks/data_analysis/discourse_analysis/tasks.py | 1 + 1 file changed, 1 insertion(+) diff --git a/augur/tasks/data_analysis/discourse_analysis/tasks.py b/augur/tasks/data_analysis/discourse_analysis/tasks.py index fa85804348..57ae59d77a 100644 --- a/augur/tasks/data_analysis/discourse_analysis/tasks.py +++ b/augur/tasks/data_analysis/discourse_analysis/tasks.py @@ -4,6 +4,7 @@ import pickle import re import nltk +import os from collections import Counter from augur.tasks.init.celery_app import celery_app as celery From 42031db04d3ed6fd81d325b46be3e9cc45f318ad Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 13 Feb 2023 15:06:04 -0600 Subject: [PATCH 130/134] Close facade sessions Signed-off-by: Andrew Brain --- augur/tasks/git/facade_tasks.py | 174 ++++++++++++++++---------------- 1 file changed, 89 insertions(+), 85 deletions(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 11536c740a..9599730db1 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -94,49 +94,50 @@ def trim_commits_facade_task(repo_id): from augur.tasks.init.celery_app import engine logger = logging.getLogger(trim_commits_facade_task.__name__) - session = FacadeSession(logger) - def update_analysis_log(repos_id,status): + with FacadeSession(logger) as session: - # Log a repo's analysis status + def update_analysis_log(repos_id,status): - log_message = s.sql.text("""INSERT INTO analysis_log (repos_id,status) - VALUES (:repo_id,:status)""").bindparams(repo_id=repos_id,status=status) + # Log a repo's analysis status - try: - session.execute_sql(log_message) - except: - pass + log_message = s.sql.text("""INSERT INTO analysis_log (repos_id,status) + VALUES (:repo_id,:status)""").bindparams(repo_id=repos_id,status=status) + try: + session.execute_sql(log_message) + except: + pass - session.inc_repos_processed() - update_analysis_log(repo_id,"Beginning analysis.") - # First we check to see if the previous analysis didn't complete - get_status = s.sql.text("""SELECT working_commit FROM working_commits WHERE repos_id=:repo_id - """).bindparams(repo_id=repo_id) + session.inc_repos_processed() + update_analysis_log(repo_id,"Beginning analysis.") + # First we check to see if the previous analysis didn't complete - try: - working_commits = session.fetchall_data_from_sql_text(get_status) - except: - working_commits = [] + get_status = s.sql.text("""SELECT working_commit FROM working_commits WHERE repos_id=:repo_id + """).bindparams(repo_id=repo_id) - # If there's a commit still there, the previous run was interrupted and - # the commit data may be incomplete. It should be trimmed, just in case. - for commit in working_commits: - trim_commit(session, repo_id,commit['working_commit']) + try: + working_commits = session.fetchall_data_from_sql_text(get_status) + except: + working_commits = [] + + # If there's a commit still there, the previous run was interrupted and + # the commit data may be incomplete. It should be trimmed, just in case. + for commit in working_commits: + trim_commit(session, repo_id,commit['working_commit']) - # Remove the working commit. 
- remove_commit = s.sql.text("""DELETE FROM working_commits - WHERE repos_id = :repo_id AND - working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) - session.execute_sql(remove_commit) - session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") + # Remove the working commit. + remove_commit = s.sql.text("""DELETE FROM working_commits + WHERE repos_id = :repo_id AND + working_commit = :commit""").bindparams(repo_id=repo_id,commit=commit['working_commit']) + session.execute_sql(remove_commit) + session.log_activity('Debug',f"Removed working commit: {commit['working_commit']}") - # Start the main analysis + # Start the main analysis - update_analysis_log(repo_id,'Collecting data') - logger.info(f"Got past repo {repo_id}") + update_analysis_log(repo_id,'Collecting data') + logger.info(f"Got past repo {repo_id}") @celery.task def trim_commits_post_analysis_facade_task(repo_id): @@ -146,83 +147,83 @@ def trim_commits_post_analysis_facade_task(repo_id): logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) - session = FacadeSession(logger) - start_date = session.get_setting('start_date') - def update_analysis_log(repos_id,status): + with FacadeSession as session: + start_date = session.get_setting('start_date') + def update_analysis_log(repos_id,status): - # Log a repo's analysis status + # Log a repo's analysis status - log_message = s.sql.text("""INSERT INTO analysis_log (repos_id,status) - VALUES (:repo_id,:status)""").bindparams(repo_id=repos_id,status=status) + log_message = s.sql.text("""INSERT INTO analysis_log (repos_id,status) + VALUES (:repo_id,:status)""").bindparams(repo_id=repos_id,status=status) + + session.execute_sql(log_message) - session.execute_sql(log_message) - - session.logger.info(f"Generating sequence for repo {repo_id}") + session.logger.info(f"Generating sequence for repo {repo_id}") - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - #Get the huge list of commits to process. - repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") - # Grab the parents of HEAD + #Get the huge list of commits to process. + repo_loc = (f"{session.repo_base_directory}{repo.repo_group_id}/{repo.repo_path}{repo.repo_name}/.git") + # Grab the parents of HEAD - parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " - "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], - stdout=subprocess.PIPE, shell=True) + parents = subprocess.Popen(["git --git-dir %s log --ignore-missing " + "--pretty=format:'%%H' --since=%s" % (repo_loc,start_date)], + stdout=subprocess.PIPE, shell=True) - parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) + parent_commits = set(parents.stdout.read().decode("utf-8",errors="ignore").split(os.linesep)) - # If there are no commits in the range, we still get a blank entry in - # the set. Remove it, as it messes with the calculations + # If there are no commits in the range, we still get a blank entry in + # the set. 
Remove it, as it messes with the calculations - if '' in parent_commits: - parent_commits.remove('') + if '' in parent_commits: + parent_commits.remove('') - # Grab the existing commits from the database + # Grab the existing commits from the database - existing_commits = set() + existing_commits = set() - find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id - """).bindparams(repo_id=repo_id) + find_existing = s.sql.text("""SELECT DISTINCT cmt_commit_hash FROM commits WHERE repo_id=:repo_id + """).bindparams(repo_id=repo_id) - #session.cfg.cursor.execute(find_existing, (repo[0], )) + #session.cfg.cursor.execute(find_existing, (repo[0], )) - try: - for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): - existing_commits.add(commit['cmt_commit_hash']) - except: - session.log_activity('Info', 'list(cfg.cursor) returned an error') + try: + for commit in session.fetchall_data_from_sql_text(find_existing):#list(session.cfg.cursor): + existing_commits.add(commit['cmt_commit_hash']) + except: + session.log_activity('Info', 'list(cfg.cursor) returned an error') - # Find missing commits and add them + # Find missing commits and add them - missing_commits = parent_commits - existing_commits + missing_commits = parent_commits - existing_commits - session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") - - # Find commits which are out of the analysis range + session.log_activity('Debug',f"Commits missing from repo {repo_id}: {len(missing_commits)}") + + # Find commits which are out of the analysis range - trimmed_commits = existing_commits - parent_commits + trimmed_commits = existing_commits - parent_commits - update_analysis_log(repo_id,'Data collection complete') + update_analysis_log(repo_id,'Data collection complete') - update_analysis_log(repo_id,'Beginning to trim commits') + update_analysis_log(repo_id,'Beginning to trim commits') - session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") + session.log_activity('Debug',f"Commits to be trimmed from repo {repo_id}: {len(trimmed_commits)}") - for commit in trimmed_commits: - trim_commit(session,repo_id,commit) - - set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' - """).bindparams(repo_id=repo_id) + for commit in trimmed_commits: + trim_commit(session,repo_id,commit) + + set_complete = s.sql.text("""UPDATE repo SET repo_status='Complete' WHERE repo_id=:repo_id and repo_status != 'Empty' + """).bindparams(repo_id=repo_id) - session.execute_sql(set_complete) + session.execute_sql(set_complete) - update_analysis_log(repo_id,'Commit trimming complete') + update_analysis_log(repo_id,'Commit trimming complete') - update_analysis_log(repo_id,'Complete') + update_analysis_log(repo_id,'Complete') @@ -242,9 +243,9 @@ def facade_start_contrib_analysis_task(): from augur.tasks.init.celery_app import engine logger = logging.getLogger(facade_start_contrib_analysis_task.__name__) - session = FacadeSession(logger) - session.update_status('Updating Contributors') - session.log_activity('Info', 'Updating Contributors with commits') + with FacadeSession(logger) as session: + session.update_status('Updating Contributors') + session.log_activity('Info', 'Updating Contributors with commits') #enable celery multithreading @@ -257,7 +258,6 @@ def analyze_commits_in_parallel(repo_id, multithreaded: bool)-> None: #create new session for celery 
thread. logger = logging.getLogger(analyze_commits_in_parallel.__name__) - # TODO: Is this session ever closed? with FacadeSession(logger) as session: start_date = session.get_setting('start_date') @@ -465,7 +465,11 @@ def generate_analysis_sequence(logger,repo_git): repo_ids = [repo['repo_id'] for repo in repos] repo_id = repo_ids.pop(0) - + + #determine amount of celery tasks to run at once in each grouped task load + concurrentTasks = int((-1 * (15/(len(repo_ids)+1))) + 15) + logger.info(f"Scheduling concurrent layers {concurrentTasks} tasks at a time.") + analysis_sequence.append(facade_analysis_init_facade_task.si()) analysis_sequence.append(grab_comitters.si(repo_id)) From 08d498950f749683204b10484c2c937975c785ad Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 13 Feb 2023 15:06:52 -0600 Subject: [PATCH 131/134] Remove unneed line Signed-off-by: Andrew Brain --- augur/tasks/git/dependency_tasks/tasks.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/augur/tasks/git/dependency_tasks/tasks.py b/augur/tasks/git/dependency_tasks/tasks.py index 20f4210dc4..7e151a1b26 100644 --- a/augur/tasks/git/dependency_tasks/tasks.py +++ b/augur/tasks/git/dependency_tasks/tasks.py @@ -14,8 +14,6 @@ def process_dependency_metrics(repo_git): logger = logging.getLogger(process_dependency_metrics.__name__) - session = DatabaseSession(logger, engine) - with DatabaseSession(logger, engine) as session: query = session.query(Repo).filter(Repo.repo_git == repo_git) repo = execute_session_query(query,'one') From 98c02242cac5c1038de5cf3b1f8dbb5a60fea1dc Mon Sep 17 00:00:00 2001 From: Andrew Brain Date: Mon, 13 Feb 2023 15:07:47 -0600 Subject: [PATCH 132/134] Use context manager Signed-off-by: Andrew Brain --- .../github/pull_requests/files_model/core.py | 101 +++++++++--------- 1 file changed, 50 insertions(+), 51 deletions(-) diff --git a/augur/tasks/github/pull_requests/files_model/core.py b/augur/tasks/github/pull_requests/files_model/core.py index c2d810f911..91d3b1aded 100644 --- a/augur/tasks/github/pull_requests/files_model/core.py +++ b/augur/tasks/github/pull_requests/files_model/core.py @@ -24,70 +24,69 @@ def pull_request_files_model(repo_id,logger): pr_numbers = [] #pd.read_sql(pr_number_sql, self.db, params={}) - # TODO: Is this session ever closed? 
- session = GithubTaskSession(logger, engine) - result = session.execute_sql(pr_number_sql).fetchall() - pr_numbers = [dict(zip(row.keys(), row)) for row in result] + with GithubTaskSession(logger, engine) as session: + result = session.execute_sql(pr_number_sql).fetchall() + pr_numbers = [dict(zip(row.keys(), row)) for row in result] - query = session.query(Repo).filter(Repo.repo_id == repo_id) - repo = execute_session_query(query, 'one') + query = session.query(Repo).filter(Repo.repo_id == repo_id) + repo = execute_session_query(query, 'one') - owner, name = get_owner_repo(repo.repo_git) + owner, name = get_owner_repo(repo.repo_git) - pr_file_rows = [] - logger.info(f"Getting pull request files for repo: {repo.repo_git}") - for index,pr_info in enumerate(pr_numbers): + pr_file_rows = [] + logger.info(f"Getting pull request files for repo: {repo.repo_git}") + for index,pr_info in enumerate(pr_numbers): - logger.info(f'Querying files for pull request #{index + 1} of {len(pr_numbers)}') - - query = """ + logger.info(f'Querying files for pull request #{index + 1} of {len(pr_numbers)}') + + query = """ - query($repo: String!, $owner: String!,$pr_number: Int!, $numRecords: Int!, $cursor: String) { - repository(name: $repo, owner: $owner) { - pullRequest(number: $pr_number) { - files ( first: $numRecords, after: $cursor) - { - edges { - node { - additions - deletions - path + query($repo: String!, $owner: String!,$pr_number: Int!, $numRecords: Int!, $cursor: String) { + repository(name: $repo, owner: $owner) { + pullRequest(number: $pr_number) { + files ( first: $numRecords, after: $cursor) + { + edges { + node { + additions + deletions + path + } + } + totalCount + pageInfo { + hasNextPage + endCursor } - } - totalCount - pageInfo { - hasNextPage - endCursor } } } } + """ + + values = ("repository","pullRequest","files") + params = { + 'owner' : owner, + 'repo' : name, + 'pr_number' : pr_info['pr_src_number'], + 'values' : values } - """ - - values = ("repository","pullRequest","files") - params = { - 'owner' : owner, - 'repo' : name, - 'pr_number' : pr_info['pr_src_number'], - 'values' : values - } - try: - file_collection = GraphQlPageCollection(query, session.oauths, session.logger,bind=params) + try: + file_collection = GraphQlPageCollection(query, session.oauths, session.logger,bind=params) - pr_file_rows += [{ - 'pull_request_id': pr_info['pull_request_id'], - 'pr_file_additions': pr_file['additions'] if 'additions' in pr_file else None, - 'pr_file_deletions': pr_file['deletions'] if 'deletions' in pr_file else None, - 'pr_file_path': pr_file['path'], - 'data_source': 'GitHub API', - 'repo_id': repo_id, - } for pr_file in file_collection if pr_file and 'path' in pr_file] - except Exception as e: - logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") - logger.error( - ''.join(traceback.format_exception(None, e, e.__traceback__))) + pr_file_rows += [{ + 'pull_request_id': pr_info['pull_request_id'], + 'pr_file_additions': pr_file['additions'] if 'additions' in pr_file else None, + 'pr_file_deletions': pr_file['deletions'] if 'deletions' in pr_file else None, + 'pr_file_path': pr_file['path'], + 'data_source': 'GitHub API', + 'repo_id': repo_id, + } for pr_file in file_collection if pr_file and 'path' in pr_file] + except Exception as e: + logger.error(f"Ran into error with pull request #{index + 1} in repo {repo_id}") + logger.error( + ''.join(traceback.format_exception(None, e, e.__traceback__))) From 1bbb3cfa36921786103e1d79f67c0ac536cf4227 Mon Sep 17 00:00:00 
2001 From: Andrew Brain Date: Mon, 13 Feb 2023 15:22:21 -0600 Subject: [PATCH 133/134] Fix syntax error Signed-off-by: Andrew Brain --- augur/tasks/git/facade_tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/augur/tasks/git/facade_tasks.py b/augur/tasks/git/facade_tasks.py index 9599730db1..1b1a90f6dd 100644 --- a/augur/tasks/git/facade_tasks.py +++ b/augur/tasks/git/facade_tasks.py @@ -147,7 +147,7 @@ def trim_commits_post_analysis_facade_task(repo_id): logger = logging.getLogger(trim_commits_post_analysis_facade_task.__name__) - with FacadeSession as session: + with FacadeSession(logger) as session: start_date = session.get_setting('start_date') def update_analysis_log(repos_id,status): From fe2d1d44dcaf479987ee819f653363f9969cb20e Mon Sep 17 00:00:00 2001 From: Isaac Milarsky Date: Mon, 13 Feb 2023 20:04:23 -0600 Subject: [PATCH 134/134] insight_worker tasks Signed-off-by: Isaac Milarsky --- augur/tasks/init/celery_app.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/augur/tasks/init/celery_app.py b/augur/tasks/init/celery_app.py index 3762054903..f8a7ecfda3 100644 --- a/augur/tasks/init/celery_app.py +++ b/augur/tasks/init/celery_app.py @@ -40,7 +40,8 @@ data_analysis_tasks = ['augur.tasks.data_analysis.message_insights.tasks', 'augur.tasks.data_analysis.clustering_worker.tasks', 'augur.tasks.data_analysis.discourse_analysis.tasks', - 'augur.tasks.data_analysis.pull_request_analysis_worker.tasks'] + 'augur.tasks.data_analysis.pull_request_analysis_worker.tasks', + 'augur.tasks.data_analysis.insight_worker.tasks'] materialized_view_tasks = ['augur.tasks.db.refresh_materialized_views']
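
Patches 125, 127, 130, 131, 132, and 133 above all converge on the same change: database sessions that were previously constructed and never closed are now opened in a `with` block so the pooled connection is released even when the task body raises (which also pairs with patch 126's smaller pool size). The following is a minimal sketch of that pattern; `SketchSession` is a hypothetical stand-in written for illustration only, not augur's actual `FacadeSession`/`DatabaseSession` API.

```python
# Illustrative sketch only -- SketchSession is a hypothetical stand-in,
# not augur's real FacadeSession/DatabaseSession class.
import logging

logging.basicConfig(level=logging.INFO)


class SketchSession:
    """Stand-in for a session object that holds a pooled DB connection."""

    def __init__(self, logger):
        self.logger = logger
        self.logger.info("session opened (connection checked out of the pool)")

    def get_setting(self, key):
        # Placeholder for a real settings lookup.
        return {"start_date": "2022-01-01"}.get(key)

    def close(self):
        self.logger.info("session closed (connection returned to the pool)")

    # Context-manager protocol: close() runs even if the body raises.
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
        return False  # do not swallow exceptions


logger = logging.getLogger("sketch")

# Leaky pattern the patches remove: nothing ever calls close(), so the
# connection is only reclaimed whenever the object happens to be collected.
session = SketchSession(logger)
start_date = session.get_setting("start_date")

# Pattern the patches introduce: cleanup is guaranteed by the with block.
with SketchSession(logger) as session:
    start_date = session.get_setting("start_date")

# Pitfall fixed in PATCH 133: `with SketchSession as session:` (no call)
# hands the class object itself to the with statement and fails at runtime,
# because only an instance provides usable __enter__/__exit__ here.
```

Returning False from `__exit__` in this sketch matters for the same reason it would in the tasks above: exceptions still propagate out of the task body, so error handlers such as `facade_error_handler.s()` attached to the task signatures continue to fire while the connection is nonetheless returned to the pool.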