Skip to content

Commit

Permalink
airbyte-ci: reduce required env vars when running in CI (#37765)
Browse files Browse the repository at this point in the history
  • Loading branch information
alafanechere committed May 2, 2024
1 parent f16632f commit 8665eaf
Show file tree
Hide file tree
Showing 8 changed files with 28 additions and 24 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/community_ci.yml
Expand Up @@ -103,6 +103,8 @@ jobs:
git_branch: ${{ github.head_ref }}
git_revision: ${{ github.event.pull_request.head.sha }}
github_token: ${{ github.token }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
- name: Upload pipeline reports
id: upload-artifact
uses: actions/upload-artifact@v4
Expand Down
1 change: 1 addition & 0 deletions airbyte-ci/connectors/pipelines/README.md
Expand Up @@ -676,6 +676,7 @@ E.G.: running Poe tasks on the modified internal packages of the current branch:

| Version | PR | Description |
|---------| ---------------------------------------------------------- |----------------------------------------------------------------------------------------------------------------------------|
| 4.12.1 | [#37765](https://github.com/airbytehq/airbyte/pull/37765) | Relax the required env vars to run in CI and handle their absence gracefully. |
| 4.12.0 | [#37690](https://github.com/airbytehq/airbyte/pull/37690) | Pass custom CI status name in `connectors test` |
| 4.11.0 | [#37641](https://github.com/airbytehq/airbyte/pull/37641) | Updates to run regression tests in GitHub Actions. |
| 4.10.5 | [#37641](https://github.com/airbytehq/airbyte/pull/37641) | Reintroduce changes from 4.10.0 with a fix. |
Expand Down
Expand Up @@ -99,8 +99,6 @@ def validate_environment(is_local: bool) -> None:
raise click.UsageError("You need to run this command from the repository root.")
else:
required_env_vars_for_ci = [
"GCP_GSM_CREDENTIALS",
"CI_REPORT_BUCKET_NAME",
"CI_GITHUB_ACCESS_TOKEN",
"DOCKER_HUB_USERNAME",
"DOCKER_HUB_PASSWORD",
Expand Down
Expand Up @@ -147,12 +147,12 @@ async def save_html_report(self) -> None:
await html_report_artifact.save_to_local_path(html_report_path)
absolute_path = html_report_path.absolute()
self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
if self.remote_storage_enabled and self.pipeline_context.ci_gcs_credentials_secret and self.pipeline_context.ci_report_bucket:
if self.pipeline_context.remote_storage_enabled:
gcs_url = await html_report_artifact.upload_to_gcs(
dagger_client=self.pipeline_context.dagger_client,
bucket=self.pipeline_context.ci_report_bucket,
bucket=self.pipeline_context.ci_report_bucket, # type: ignore
key=self.html_report_remote_storage_key,
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, # type: ignore
)
self.pipeline_context.logger.info(f"HTML report uploaded to {gcs_url}")

Expand Down
Expand Up @@ -380,7 +380,7 @@ def regression_tests_command(self) -> List[str]:
exit $pytest_exit
"""
)
return [f"bash", "-c", f"'{run_pytest_with_proxy}'"]
return ["bash", "-c", f"'{run_pytest_with_proxy}'"]

def __init__(self, context: ConnectorContext) -> None:
"""Create a step to run regression tests for a connector.
Expand Down
Expand Up @@ -160,9 +160,10 @@ def report(self, report: Report | ConnectorReport) -> None:
self._report = report

@property
def ci_gcs_credentials_secret(self) -> Secret:
assert self.ci_gcs_credentials is not None, "The ci_gcs_credentials was not set on this PipelineContext."
return self.dagger_client.set_secret("ci_gcs_credentials", self.ci_gcs_credentials)
def ci_gcs_credentials_secret(self) -> Secret | None:
if self.ci_gcs_credentials is not None:
return self.dagger_client.set_secret("ci_gcs_credentials", self.ci_gcs_credentials)
return None

@property
def ci_github_access_token_secret(self) -> Secret:
Expand Down Expand Up @@ -210,6 +211,10 @@ def dagger_cloud_url(self) -> Optional[str]:

return f"https://alpha.dagger.cloud/changeByPipelines?filter=dagger.io/git.ref:{self.git_revision}"

@property
def remote_storage_enabled(self) -> bool:
    """Whether pipeline reports/artifacts may be uploaded to remote (GCS) storage.

    True only when all three conditions hold: the pipeline runs in CI,
    a report bucket name is set, and GCS credentials are available
    (``ci_gcs_credentials_secret`` returns None when credentials were not
    provided, which disables remote storage gracefully instead of failing).
    """
    # `is True` deliberately requires a real boolean `is_ci`, not merely a
    # truthy value — NOTE(review): confirm is_ci is always a bool upstream.
    return self.is_ci is True and self.ci_report_bucket is not None and self.ci_gcs_credentials_secret is not None

def get_repo_file(self, file_path: str) -> File:
"""Get a file from the current repository.
Expand Down
26 changes: 12 additions & 14 deletions airbyte-ci/connectors/pipelines/pipelines/models/reports.py
Expand Up @@ -84,10 +84,6 @@ def lead_duration(self) -> timedelta:
assert self.pipeline_context.stopped_at is not None, "The pipeline stopped_at timestamp must be set to save reports."
return self.pipeline_context.stopped_at - self.pipeline_context.created_at

@property
def remote_storage_enabled(self) -> bool:
return self.pipeline_context.is_ci

async def save(self) -> None:
self.report_dir_path.mkdir(parents=True, exist_ok=True)
await self.save_json_report()
Expand All @@ -103,14 +99,16 @@ async def save_json_report(self) -> None:
await json_report_artifact.save_to_local_path(json_report_path)
absolute_path = json_report_path.absolute()
self.pipeline_context.logger.info(f"Report saved locally at {absolute_path}")
if self.remote_storage_enabled and self.pipeline_context.ci_report_bucket and self.pipeline_context.ci_gcs_credentials_secret:
if self.pipeline_context.remote_storage_enabled:
gcs_url = await json_report_artifact.upload_to_gcs(
dagger_client=self.pipeline_context.dagger_client,
bucket=self.pipeline_context.ci_report_bucket,
bucket=self.pipeline_context.ci_report_bucket, # type: ignore
key=self.json_report_remote_storage_key,
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, # type: ignore
)
self.pipeline_context.logger.info(f"JSON Report uploaded to {gcs_url}")
else:
self.pipeline_context.logger.info("JSON Report not uploaded to GCS because remote storage is disabled.")

async def save_step_result_artifacts(self) -> None:
local_artifacts_dir = self.report_dir_path / "artifacts"
Expand All @@ -121,19 +119,19 @@ async def save_step_result_artifacts(self) -> None:
step_artifacts_dir = local_artifacts_dir / slugify(step_result.step.title)
step_artifacts_dir.mkdir(parents=True, exist_ok=True)
await artifact.save_to_local_path(step_artifacts_dir / artifact.name)
if (
self.remote_storage_enabled
and self.pipeline_context.ci_report_bucket
and self.pipeline_context.ci_gcs_credentials_secret
):
if self.pipeline_context.remote_storage_enabled:
upload_time = int(time.time())
gcs_url = await artifact.upload_to_gcs(
dagger_client=self.pipeline_context.dagger_client,
bucket=self.pipeline_context.ci_report_bucket,
bucket=self.pipeline_context.ci_report_bucket, # type: ignore
key=f"{self.report_output_prefix}/artifacts/{slugify(step_result.step.title)}/{upload_time}_{artifact.name}",
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret,
gcs_credentials=self.pipeline_context.ci_gcs_credentials_secret, # type: ignore
)
self.pipeline_context.logger.info(f"Artifact {artifact.name} for {step_result.step.title} uploaded to {gcs_url}")
else:
self.pipeline_context.logger.info(
f"Artifact {artifact.name} for {step_result.step.title} not uploaded to GCS because remote storage is disabled."
)

def to_json(self) -> str:
"""Create a JSON representation of the report.
Expand Down
2 changes: 1 addition & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pipelines"
version = "4.12.0"
version = "4.12.1"
description = "Package maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte <contact@airbyte.io>"]

Expand Down

0 comments on commit 8665eaf

Please sign in to comment.