iib: add negative test, fix when no ci_jobs in config #43

Merged · 1 commit · Mar 7, 2024
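The fix itself is small: in get_new_iib, config_data.get("ci_jobs", {}) only falls back to the default when the key is missing entirely, so a config with an explicitly empty ci_jobs key (for example an empty ci_jobs entry in the config file) still yields None, and the old code then failed on ci_jobs.items(). Below is a minimal standalone sketch of the guard this PR adds; the get_jobs name and the assertions are illustrative only, not part of the project.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("iib-trigger-sketch")

LOG_PREFIX = "iib-trigger:"


def get_jobs(config_data, logger):
    # dict.get() only uses the default when the key is absent;
    # a key that is present but set to None still returns None.
    if (ci_jobs := config_data.get("ci_jobs", {})) is None:
        logger.error(f"{LOG_PREFIX} No ci_jobs found in config")
        return {}
    return ci_jobs


assert get_jobs({}, logger) == {}                 # key missing -> default used
assert get_jobs({"ci_jobs": None}, logger) == {}  # key present but null -> guarded
assert get_jobs({"ci_jobs": {"v4.15": []}}, logger) == {"v4.15": []}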
45 changes: 24 additions & 21 deletions ci_jobs_trigger/libs/operators_iib_trigger/iib_trigger.py
@@ -20,6 +20,7 @@
LOCAL_REPO_PATH = "/tmp/ci-jobs-trigger"
OPERATORS_DATA_FILE_NAME = "operators-latest-iib.json"
OPERATORS_DATA_FILE = os.path.join(LOCAL_REPO_PATH, OPERATORS_DATA_FILE_NAME)
LOG_PREFIX = "iib-trigger:"


def read_data_file():
@@ -32,7 +33,7 @@ def read_data_file():


def get_operator_data_from_url(operator_name, ocp_version, logger):
logger.info(f"Getting IIB data for {operator_name}")
logger.info(f"{LOG_PREFIX} Getting IIB data for {operator_name}")
datagrepper_query_url = (
"https://datagrepper.engineering.redhat.com/raw?topic=/topic/"
"VirtualTopic.eng.ci.redhat-container-image.index.built"
@@ -42,7 +43,7 @@ def get_operator_data_from_url(operator_name, ocp_version, logger):
f"{datagrepper_query_url}&contains={operator_name}",
verify=False,
)
logger.info(f"Done getting IIB data for {operator_name}")
logger.info(f"{LOG_PREFIX} Done getting IIB data for {operator_name}")
json_res = res.json()
for raw_msg in json_res["raw_messages"]:
_index = raw_msg["msg"]["index"]
@@ -54,7 +55,9 @@ def get_new_iib(config_data, logger):
new_trigger_data = False
data_from_file = read_data_file()
new_data = copy.deepcopy(data_from_file)
ci_jobs = config_data.get("ci_jobs", {})
if (ci_jobs := config_data.get("ci_jobs", {})) is None:
logger.error(f"{LOG_PREFIX} No ci_jobs found in config")
return {}

for _ocp_version, _jobs_data in ci_jobs.items():
if _jobs_data:
@@ -68,7 +71,7 @@
new_data[_ocp_version][job_name]["operators"].setdefault(_operator_name, {})
_operator_data = new_data[_ocp_version][job_name]["operators"][_operator_name]
_operator_data["triggered"] = False
logger.info(f"Parsing new IIB data for {_operator_name}")
logger.info(f"{LOG_PREFIX} Parsing new IIB data for {_operator_name}")
for iib_data in get_operator_data_from_url(
operator_name=_operator,
ocp_version=_ocp_version,
@@ -90,10 +93,10 @@
_operator_data["triggered"] = True
new_trigger_data = True

logger.info(f"Done parsing new IIB data for {_jobs_data}")
logger.info(f"{LOG_PREFIX} Done parsing new IIB data for {_jobs_data}")

if new_trigger_data:
logger.info(f"New IIB data found: {new_data}\nOld IIB data: {data_from_file}")
logger.info(f"{LOG_PREFIX} New IIB data found: {new_data}\nOld IIB data: {data_from_file}")
with open(OPERATORS_DATA_FILE, "w") as fd:
fd.write(json.dumps(new_data))

@@ -107,16 +110,16 @@ def clone_repo(repo_url):

@contextmanager
def change_directory(directory, logger):
logger.info(f"Changing directory to {directory}")
logger.info(f"{LOG_PREFIX} Changing directory to {directory}")
old_cwd = os.getcwd()
yield os.chdir(directory)
logger.info(f"Changing back to directory {old_cwd}")
logger.info(f"{LOG_PREFIX} Changing back to directory {old_cwd}")
os.chdir(old_cwd)


def push_changes(repo_url, slack_webhook_url, logger):
has_changes = False
logger.info(f"Check if {OPERATORS_DATA_FILE} was changed")
logger.info(f"{LOG_PREFIX} Check if {OPERATORS_DATA_FILE} was changed")
with change_directory(directory=LOCAL_REPO_PATH, logger=logger):
try:
_git_repo = Repo(LOCAL_REPO_PATH)
@@ -126,26 +129,26 @@ def push_changes(repo_url, slack_webhook_url, logger):

if OPERATORS_DATA_FILE_NAME in _git_repo.git.status():
has_changes = True
logger.info(f"Found changes for {OPERATORS_DATA_FILE_NAME}, pushing new changes")
logger.info(f"Run pre-commit on {OPERATORS_DATA_FILE_NAME}")
logger.info(f"{LOG_PREFIX} Found changes for {OPERATORS_DATA_FILE_NAME}, pushing new changes")
logger.info(f"{LOG_PREFIX} Run pre-commit on {OPERATORS_DATA_FILE_NAME}")
os.system(f"pre-commit run --files {OPERATORS_DATA_FILE_NAME}")
logger.info(f"Adding {OPERATORS_DATA_FILE_NAME} to git")
logger.info(f"{LOG_PREFIX} Adding {OPERATORS_DATA_FILE_NAME} to git")
_git_repo.git.add(OPERATORS_DATA_FILE_NAME)
logger.info(f"Committing changes for {OPERATORS_DATA_FILE_NAME}")
logger.info(f"{LOG_PREFIX} Committing changes for {OPERATORS_DATA_FILE_NAME}")
_git_repo.git.commit("-m", f"'Auto update: {OPERATORS_DATA_FILE_NAME}'")
logger.info(f"Push new changes for {OPERATORS_DATA_FILE}")
logger.info(f"{LOG_PREFIX} Push new changes for {OPERATORS_DATA_FILE}")
_git_repo.git.push(repo_url)
logger.info(f"New changes for {OPERATORS_DATA_FILE_NAME} pushed")
logger.info(f"{LOG_PREFIX} New changes for {OPERATORS_DATA_FILE_NAME} pushed")
except Exception as ex:
err_msg = f"Failed to update {OPERATORS_DATA_FILE_NAME}. {ex}"
err_msg = f"{LOG_PREFIX} Failed to update {OPERATORS_DATA_FILE_NAME}. {ex}"
logger.error(err_msg)
send_slack_message(message=err_msg, webhook_url=slack_webhook_url, logger=logger)

logger.info(f"Done check if {OPERATORS_DATA_FILE} was changed. was changed: {has_changes}")
logger.info(f"{LOG_PREFIX} Done check if {OPERATORS_DATA_FILE} was changed. was changed: {has_changes}")


def fetch_update_iib_and_trigger_jobs(logger, config_dict=None):
logger.info("Check for new operators IIB")
logger.info(f"{LOG_PREFIX} Check for new operators IIB")
config_data = get_config(os_environ="CI_IIB_JOBS_TRIGGER_CONFIG", logger=logger, config_dict=config_dict)
slack_errors_webhook_url = config_data.get("slack_errors_webhook_url")
token = config_data["github_token"]
@@ -181,16 +184,16 @@ def run_iib_update(logger):
try:
failed_triggered_jobs = fetch_update_iib_and_trigger_jobs(logger=logger)
if failed_triggered_jobs:
logger.info(f"Failed triggered jobs: {failed_triggered_jobs}")
logger.info(f"{LOG_PREFIX} Failed triggered jobs: {failed_triggered_jobs}")

except Exception as ex:
err_msg = f"Fail to run run_iib_update function. {ex}"
err_msg = f"{LOG_PREFIX} Fail to run run_iib_update function. {ex}"
logger.error(err_msg)
slack_errors_webhook_url = get_config(os_environ="CI_IIB_JOBS_TRIGGER_CONFIG", logger=logger).get(
"slack_errors_webhook_url"
)
send_slack_message(message=err_msg, webhook_url=slack_errors_webhook_url, logger=logger)

finally:
logger.info("Done check for new operators IIB, sleeping for 1 day")
logger.info(f"{LOG_PREFIX} Done check for new operators IIB, sleeping for 1 day")
sleep(DAYS_TO_SECONDS)
(second changed file: tests for the IIB trigger)
@@ -27,37 +27,53 @@ def json():


@pytest.fixture()
def config_dict(tmp_path_factory):
def base_config_dict():
return {
"trigger_token": "token",
"github_token": "token",
"jenkins_token": "token",
"jenkins_username": "user",
"jenkins_url": "https://jenkins",
"ci_jobs": {
"v4.15": [
{
"name": "openshift-ci-job-name",
"ci": "openshift-ci",
"products": {
"product": "operator",
},
}
],
"v4.16": [
{
"name": "jenkins-job-name",
"ci": "jenkins",
"products": {
"product": "operator",
},
}
],
},
}


def test_fetch_update_iib_and_trigger_jobs(mocker, functions_mocker, config_dict):
@pytest.fixture()
def config_dict(base_config_dict):
base_config_dict["ci_jobs"] = {
"v4.15": [
{
"name": "openshift-ci-job-name",
"ci": "openshift-ci",
"products": {
"product": "operator",
},
}
],
"v4.16": [
{
"name": "jenkins-job-name",
"ci": "jenkins",
"products": {
"product": "operator",
},
}
],
}

return base_config_dict


@pytest.fixture()
def config_dict_no_ci_jobs(base_config_dict):
base_config_dict["ci_jobs"] = None
return base_config_dict


def test_fetch_update_iib_and_trigger_jobs_no_ci_jobs_config(mocker, functions_mocker, config_dict_no_ci_jobs):
mocker.patch.object(requests, "get", return_value=MockRequestGet())
assert not fetch_update_iib_and_trigger_jobs(config_dict=config_dict_no_ci_jobs, logger=LOGGER)


def test_fetch_update_iib_and_trigger_jobs(mocker, functions_mocker, config_dict):
mocker.patch.object(requests, "get", return_value=MockRequestGet())
fetch_update_iib_and_trigger_jobs(config_dict=config_dict, logger=LOGGER)
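The test changes split the previous config_dict fixture into a shared base_config_dict plus two derived fixtures, so the new negative test only has to override ci_jobs with None. A compact sketch of the same fixture-composition pattern, detached from this repo's helpers (fixture and test names below are illustrative):

import pytest


@pytest.fixture()
def base_config():
    # Fields shared by every config variant.
    return {"github_token": "token", "ci_jobs": {}}


@pytest.fixture()
def config_no_ci_jobs(base_config):
    # Derived fixture: same base, but ci_jobs explicitly set to None,
    # which is the case the new guard in get_new_iib handles.
    base_config["ci_jobs"] = None
    return base_config


def test_missing_ci_jobs_is_guarded(config_no_ci_jobs):
    # Mirrors the condition checked inside get_new_iib.
    assert config_no_ci_jobs.get("ci_jobs", {}) is None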