Add trigger by IIB and trigger by repository changes #23

Merged: 6 commits, Mar 3, 2024
35 changes: 30 additions & 5 deletions ci_jobs_trigger/app.py
@@ -3,12 +3,17 @@
from simple_logger.logger import get_logger
from flask.logging import default_handler

from ci_jobs_trigger.libs.addons_webhook_trigger.addons_webhook_trigger import (
process_hook,
ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG_STR,
)
from ci_jobs_trigger.libs.openshift_ci.re_trigger.re_trigger import JobTriggering
from ci_jobs_trigger.libs.openshift_ci.ztream_trigger.zstream_trigger import (
OPENSHIFT_CI_ZSTREAM_TRIGGER_CONFIG_OS_ENV_STR,
monitor_and_trigger,
process_and_trigger_jobs,
)
from ci_jobs_trigger.libs.operators_iib_trigger.iib_trigger import run_iib_update
from ci_jobs_trigger.utils.general import (
get_config,
process_webhook_exception,
@@ -30,8 +35,7 @@ def zstream_trigger():
try:
version = request.query_string.decode()
APP.logger.info(f"Processing version: {version}")
process_and_trigger_jobs(version=version, logger=APP.logger)
return "Process done"
return process_and_trigger_jobs(version=version, logger=APP.logger)
except Exception as ex:
return process_webhook_exception(
logger=APP.logger,
@@ -48,8 +52,7 @@ def openshift_ci_job_re_trigger():
hook_data = request.json
try:
job_triggering = JobTriggering(hook_data=hook_data, logger=APP.logger)
job_triggering.execute_trigger()
return "Process done"
return job_triggering.execute_trigger()

except Exception as ex:
return process_webhook_exception(
@@ -60,7 +63,29 @@ def openshift_ci_job_re_trigger():
)


@APP.route("/addons-trigger", methods=["POST"])
def process():
try:
hook_data = request.json
APP.logger.info(f"{hook_data['repository']['name']}: Event type: {hook_data['event_type']}")
return process_hook(data=hook_data, logger=APP.logger)
except Exception as ex:
return process_webhook_exception(
logger=APP.logger,
ex=ex,
route="addons-trigger",
slack_errors_webhook_url=get_config(
os_environ=ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG_STR, logger=APP.logger
).get("slack_errors_webhook_url"),
)


if __name__ == "__main__":
run_in_process(targets={monitor_and_trigger: {"logger": APP.logger}})
run_in_process(
targets={
monitor_and_trigger: {"logger": APP.logger},
run_iib_update: {"logger": APP.logger},
}
)
APP.logger.info(f"Starting {APP.name} app")
APP.run(port=5000, host="0.0.0.0", use_reloader=False)
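
For context, a minimal sketch of how the new /addons-trigger route could be exercised against a running instance (APP.run above binds 0.0.0.0:5000). The payload keys mirror what process() and process_hook() read from request.json; the repository name, project id, and iid are hypothetical, and a real call still needs the ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG environment variable and a reachable GitLab instance behind it.

import requests

# GitLab-style merge event; field names follow the handler above,
# concrete values are placeholders.
payload = {
    "event_type": "merge_request",
    "object_attributes": {"action": "merge", "iid": 1},
    "repository": {"name": "managed-tenants"},  # hypothetical repository name
    "project": {"id": 1234},                    # hypothetical project id
}

response = requests.post("http://localhost:5000/addons-trigger", json=payload)
print(response.status_code, response.text)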
116 changes: 116 additions & 0 deletions ci_jobs_trigger/libs/addons_webhook_trigger/addons_webhook_trigger.py
@@ -0,0 +1,116 @@
import re

import gitlab

from ci_jobs_trigger.libs.utils.general import trigger_ci_job
from ci_jobs_trigger.utils.general import get_config


ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG_STR = "ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG"


class RepositoryNotFoundError(Exception):
pass


def get_gitlab_api(url, token):
gitlab_api = gitlab.Gitlab(url=url, private_token=token, ssl_verify=False)
gitlab_api.auth()
return gitlab_api


def repo_data_from_config(repository_name, config_data):
data = config_data["repositories"].get(repository_name)
if not data:
raise RepositoryNotFoundError(f"Repository {repository_name} not found in config file")

return data


def get_merge_request(repository_data, object_attributes, project, logger):
api = get_gitlab_api(url=repository_data["gitlab_url"], token=repository_data["gitlab_token"])
project = api.projects.get(project)
merge_request = project.mergerequests.get(object_attributes["iid"])
logger.info(f"{project.name}: New merge request [{merge_request.iid}] {merge_request.title}")
return merge_request


def process_hook(data, logger, config_dict=None):
def _trigger_jobs(
_addon,
_ocm_env,
_repository_data,
_config_data,
_logger,
_project,
):
openshift_ci = "openshift-ci"
jenkins_ci = "jenkins"
_openshift_ci_jobs = []
_jenkins_ci_jobs = []
openshift_ci_jobs_from_config = _repository_data["products_jobs_mapping"].get(openshift_ci, {})
jenkins_ci_jobs_from_config = _repository_data["products_jobs_mapping"].get(jenkins_ci, {})

for key, val in openshift_ci_jobs_from_config.items():
if key == _addon and [*val][0] == _ocm_env:
_openshift_ci_jobs.extend(val[_ocm_env])

for key, val in jenkins_ci_jobs_from_config.items():
if key == _addon and [*val][0] == _ocm_env:
_jenkins_ci_jobs.extend(val[_ocm_env])

if not _openshift_ci_jobs and not _jenkins_ci_jobs:
logger.info(f"{_project}: No job found for product: {_addon}")
return False

for _job in _openshift_ci_jobs:
trigger_ci_job(
job=_job,
product=_addon,
_type="addon",
ci=openshift_ci,
config_data=_config_data,
logger=_logger,
)

for _job in _jenkins_ci_jobs:
trigger_ci_job(
job=_job,
product=_addon,
_type="addon",
ci=jenkins_ci,
config_data=_config_data,
logger=_logger,
)
return True

object_attributes = data["object_attributes"]
if object_attributes.get("action") == "merge":
config_data = get_config(
config_dict=config_dict, os_environ=ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG_STR, logger=logger
)
repository_name = data["repository"]["name"]
repository_data = repo_data_from_config(repository_name=repository_name, config_data=config_data)
project = data["project"]["id"]
merge_request = get_merge_request(
repository_data=repository_data, object_attributes=object_attributes, project=project, logger=logger
)

for change in merge_request.changes().get("changes", []):
changed_file = change.get("new_path")
# TODO: Get product version from changed_file and send it to slack
matches = re.match(
r"addons/(?P<product>.*)/addonimagesets/(?P<env>production|stage)/.*.yaml",
changed_file,
)
if matches:
return _trigger_jobs(
_addon=matches.group("product"),
_ocm_env=matches.group("env"),
_repository_data=repository_data,
_config_data=config_data,
_logger=logger,
_project=project,
)

return True
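
For readers wiring this up, a hedged sketch of the configuration shape that process_hook() appears to expect from ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG. The key names follow the lookups above (repositories, gitlab_url, gitlab_token, products_jobs_mapping, and the addon -> OCM environment -> jobs nesting); every concrete value is a placeholder.

# Sketch of the dict returned by get_config(os_environ=ADDONS_WEBHOOK_JOBS_TRIGGER_CONFIG_STR).
# Only the key layout is derived from the code above; all values are examples.
example_config = {
    "slack_errors_webhook_url": "https://hooks.slack.com/services/...",  # read by the Flask route
    "repositories": {
        "managed-tenants": {  # hypothetical name, matched against data["repository"]["name"]
            "gitlab_url": "https://gitlab.example.com",
            "gitlab_token": "glpat-...",
            "products_jobs_mapping": {
                "openshift-ci": {
                    "my-addon": {"stage": ["periodic-ci-my-addon-stage"]},  # addon -> env -> jobs
                },
                "jenkins": {
                    "my-addon": {"production": ["my-addon-production-job"]},
                },
            },
        },
    },
}

With such a config, a merged change to a file like addons/my-addon/addonimagesets/stage/my-addon.v1.2.3.yaml would match the regex above (product "my-addon", env "stage") and trigger the openshift-ci jobs listed under that addon and environment.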
20 changes: 20 additions & 0 deletions ci_jobs_trigger/libs/jenkins/utils/general.py
@@ -0,0 +1,20 @@
from api4jenkins import Jenkins


def jenkins_trigger_job(job, config_data):
api = Jenkins(
url=config_data["jenkins_url"],
auth=(config_data["jenkins_username"], config_data["jenkins_token"]),
verify=False,
)
job = api.get_job(full_name=job)
job_params = {}
for param in job.get_parameters():
job_params[param["defaultParameterValue"]["name"]] = param["defaultParameterValue"]["value"]

try:
res = job.build(parameters=job_params)
build = res.get_build()
return build.exists(), build
except Exception:
return False, None
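
A short, hypothetical usage sketch of the new Jenkins helper. The config keys match what jenkins_trigger_job() reads, the job name and credentials are placeholders, and the import path follows the file location above. Note that the helper re-submits each parameter's default value, so parameterized jobs run with their defaults.

from ci_jobs_trigger.libs.jenkins.utils.general import jenkins_trigger_job

config_data = {
    "jenkins_url": "https://jenkins.example.com",  # placeholder values
    "jenkins_username": "ci-user",
    "jenkins_token": "api-token",
}

# Returns (build_exists, build) on success, (False, None) on any trigger failure.
triggered, build = jenkins_trigger_job(job="addons/my-addon-production-job", config_data=config_data)
if triggered:
    print(f"Triggered Jenkins build: {build}")
else:
    print("Failed to trigger Jenkins job")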
6 changes: 3 additions & 3 deletions ci_jobs_trigger/libs/openshift_ci/re_trigger/re_trigger.py
@@ -10,9 +10,9 @@
from ci_jobs_trigger.libs.openshift_ci.re_trigger.job_db import DB
from ci_jobs_trigger.libs.openshift_ci.utils.constants import GANGWAY_API_URL, PROW_LOGS_URL_PREFIX
from ci_jobs_trigger.utils.general import OpenshiftCiReTriggerError, send_slack_message
from ci_jobs_trigger.libs.openshift_ci.utils.openshift_ci import (
from ci_jobs_trigger.libs.openshift_ci.utils.general import (
get_authorization_header,
trigger_job,
openshift_ci_trigger_job,
)


@@ -122,7 +122,7 @@ def wait_for_job_completed(self):

def _trigger_job(self):
self.logger.info(f"{self.log_prefix} Trigger job.")
response = trigger_job(job_name=self.job_name, trigger_token=self.trigger_token)
response = openshift_ci_trigger_job(job_name=self.job_name, trigger_token=self.trigger_token)

if not response.ok:
err_msg = f"Failed to get job status: {response.headers.get('grpc-message')}"
@@ -3,7 +3,7 @@
from ci_jobs_trigger.libs.openshift_ci.utils.constants import GANGWAY_API_URL


def trigger_job(job_name, trigger_token):
def openshift_ci_trigger_job(job_name, trigger_token):
return requests.post(
url=f"{GANGWAY_API_URL}/{job_name}",
headers=get_authorization_header(trigger_token=trigger_token),
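
The renamed helper (openshift_ci_trigger_job, now imported from ci_jobs_trigger.libs.openshift_ci.utils.general per the updated imports above) still returns the requests response from the Gangway API call, so callers check response.ok as re_trigger.py and zstream_trigger.py do. A hedged sketch with a placeholder job name and token:

from ci_jobs_trigger.libs.openshift_ci.utils.general import openshift_ci_trigger_job

# Placeholder job name and token; a real call needs a valid Prow/Gangway trigger token.
response = openshift_ci_trigger_job(
    job_name="periodic-ci-my-addon-stage",
    trigger_token="gangway-trigger-token",
)
if not response.ok:
    print(f"Trigger failed: {response.headers.get('grpc-message')}")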
@@ -5,7 +5,7 @@
from semver import Version

from ci_jobs_trigger.utils.general import get_config, send_slack_message
from ci_jobs_trigger.libs.openshift_ci.utils.openshift_ci import trigger_job
from ci_jobs_trigger.libs.openshift_ci.utils.general import openshift_ci_trigger_job

OPENSHIFT_CI_ZSTREAM_TRIGGER_CONFIG_OS_ENV_STR = "OPENSHIFT_CI_ZSTREAM_TRIGGER_CONFIG"

@@ -38,7 +38,7 @@ def trigger_jobs(config, jobs, logger):
failed_triggers_jobs = []
successful_triggers_jobs = []
for job in jobs:
res = trigger_job(job_name=job, trigger_token=config["trigger_token"])
res = openshift_ci_trigger_job(job_name=job, trigger_token=config["trigger_token"])

if res.ok:
successful_triggers_jobs.append(job)
@@ -54,7 +54,7 @@
if failed_triggers_jobs:
err_msg = f"Failed to trigger {len(failed_triggers_jobs)} jobs: {failed_triggers_jobs}"
logger.info(err_msg)
send_slack_message(message=err_msg, webhook_url=config["slack_errors_webhook_url"], logger=logger)
send_slack_message(message=err_msg, webhook_url=config.get("slack_errors_webhook_url"), logger=logger)
return False

