Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

airbyte-ci: Add --arch flag to build #32808

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,9 @@ Build certified connectors:
Build connectors changed on the current branch:
`airbyte-ci connectors --modified build`

Build connectors with a specific architecture:
`airbyte-ci connectors --name=source-pokeapi build --arch=arm64 --arch=amd64`

#### What it runs

For Python and Low Code connectors:
Expand Down Expand Up @@ -433,6 +436,7 @@ This command runs the Python tests for an airbyte-ci poetry package.
## Changelog
| Version | PR | Description |
| ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- |
| 2.7.1 | [#32808](https://github.com/airbytehq/airbyte/pull/32808) | Add `--arch` option to airbyte-ci build |
| 2.7.0 | [#31930](https://github.com/airbytehq/airbyte/pull/31930) | Merge airbyte-ci-internal into airbyte-ci |
| 2.6.0 | [#31831](https://github.com/airbytehq/airbyte/pull/31831) | Add `airbyte-ci format` commands, remove connector-specific formatting check |
| 2.5.9 | [#32427](https://github.com/airbytehq/airbyte/pull/32427) | Re-enable caching for source-postgres |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,14 @@
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#

from typing import List

import asyncclick as click
from pipelines.airbyte_ci.connectors.build_image.steps import run_connector_build_pipeline
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
from pipelines.consts import BUILD_PLATFORMS, LOCAL_BUILD_PLATFORM_STRING


@click.command(cls=DaggerPipelineCommand, help="Build all images for the selected connectors.")
Expand All @@ -17,9 +20,17 @@
default=False,
type=bool,
)
@click.option(
"--arch",
help="Architecture to build connector images for. Defaults to your local architecture.",
default=[LOCAL_BUILD_PLATFORM_STRING],
type=click.Choice(BUILD_PLATFORMS.keys()),
multiple=True,
)
@click.pass_context
async def build(ctx: click.Context, use_host_gradle_dist_tar: bool) -> bool:
async def build(ctx: click.Context, use_host_gradle_dist_tar: bool, arch: List[str]) -> bool:
"""Runs a build pipeline for the selected connectors."""
build_platforms = [BUILD_PLATFORMS[a] for a in arch]

connectors_contexts = [
ConnectorContext(
Expand All @@ -41,6 +52,7 @@ async def build(ctx: click.Context, use_host_gradle_dist_tar: bool) -> bool:
use_host_gradle_dist_tar=use_host_gradle_dist_tar,
s3_build_cache_access_key_id=ctx.obj.get("s3_build_cache_access_key_id"),
s3_build_cache_secret_key=ctx.obj.get("s3_build_cache_secret_key"),
build_platforms = build_platforms,
)
for connector in ctx.obj["selected_connectors_with_modified_files"]
]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,8 @@ async def run_connector_build_pipeline(context: ConnectorContext, semaphore: any
build_result = await run_connector_build(context)
step_results.append(build_result)
if context.is_local and build_result.status is StepStatus.SUCCESS:
load_image_result = await LoadContainerToLocalDockerHost(context, LOCAL_BUILD_PLATFORM, build_result.output_artifact).run()
step_results.append(load_image_result)
for build_platform in context.build_platforms:
load_image_result = await LoadContainerToLocalDockerHost(context, build_platform, build_result.output_artifact).run()
step_results.append(load_image_result)
context.report = ConnectorReport(context, step_results, name="BUILD RESULTS")
return context.report
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@ class BuildConnectorImagesBase(Step, ABC):
def title(self):
return f"Build {self.context.connector.technical_name} docker image for platform(s) {', '.join(self.build_platforms)}"

def __init__(self, context: ConnectorContext) -> None:
    """Initialize the image-build step.

    The set of platforms to build for comes from the context (populated by
    the `--arch` CLI option). When the context carries no platforms, fall
    back to every supported platform in BUILD_PLATFORMS.
    """
    # NOTE(review): BUILD_PLATFORMS.values() is a dict_values view, not a
    # list — confirm downstream consumers only iterate it.
    self.build_platforms = context.build_platforms if context.build_platforms else BUILD_PLATFORMS.values()
    super().__init__(context)

async def _run(self, *args) -> StepResult:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ async def run_connector_build(context: ConnectorContext) -> StepResult:
# Special case: use a local dist tar to speed up local development.
dist_dir = await context.dagger_client.host().directory(dist_tar_directory_path(context), include=["*.tar"])
# Speed things up by only building for the local platform.
return await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run(dist_dir)
return await BuildConnectorImages(context).run(dist_dir)

# Default case: distribution tar is built by the dagger pipeline.
build_connector_tar_result = await BuildConnectorDistributionTar(context).run()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,12 @@

from datetime import datetime
from types import TracebackType
from typing import Optional
from typing import List, Optional

import yaml
from anyio import Path
from asyncer import asyncify
from dagger import Directory, Secret
from dagger import Directory, Platform, Secret
from github import PullRequest
from pipelines.airbyte_ci.connectors.reports import ConnectorReport
from pipelines.dagger.actions import secrets
Expand Down Expand Up @@ -60,6 +60,7 @@ def __init__(
s3_build_cache_access_key_id: Optional[str] = None,
s3_build_cache_secret_key: Optional[str] = None,
concurrent_cat: Optional[bool] = False,
build_platforms: List[Platform] = None,
):
"""Initialize a connector context.

Expand Down Expand Up @@ -110,6 +111,7 @@ def __init__(
self.s3_build_cache_access_key_id = s3_build_cache_access_key_id
self.s3_build_cache_secret_key = s3_build_cache_secret_key
self.concurrent_cat = concurrent_cat
self.build_platforms = build_platforms

super().__init__(
pipeline_name=pipeline_name,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ async def check_if_image_only_has_gzip_layers(self) -> bool:
We want to make sure that the image we are about to release is compatible with all docker versions.
We use crane to inspect the manifest of the image and check if it only has gzip layers.
"""
for platform in consts.BUILD_PLATFORMS:
for platform in consts.BUILD_PLATFORMS.values():
inspect = docker.with_crane(self.context).with_exec(
["manifest", "--platform", f"{str(platform)}", f"docker.io/{self.context.docker_image}"]
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
from pipelines.airbyte_ci.connectors.test.pipeline import run_connector_test_pipeline
from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
from pipelines.consts import ContextState
from pipelines.consts import LOCAL_BUILD_PLATFORM, ContextState
from pipelines.helpers.github import update_global_commit_status_check_for_tests
from pipelines.helpers.utils import fail_if_missing_docker_hub_creds

Expand Down Expand Up @@ -95,6 +95,7 @@ async def test(
docker_hub_username=ctx.obj.get("docker_hub_username"),
docker_hub_password=ctx.obj.get("docker_hub_password"),
concurrent_cat=concurrent_cat,
build_platforms=[LOCAL_BUILD_PLATFORM],
)
for connector in ctx.obj["selected_connectors_with_modified_files"]
]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ async def run_all_tests(context: ConnectorContext) -> List[StepResult]:

async def run_docker_build_dependent_steps(dist_tar_dir: Directory) -> List[StepResult]:
step_results = []
build_connector_image_results = await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run(dist_tar_dir)
build_connector_image_results = await BuildConnectorImages(context).run(dist_tar_dir)
step_results.append(build_connector_image_results)
if build_connector_image_results.status is StepStatus.FAILURE:
return step_results
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ async def run_all_tests(context: ConnectorContext) -> List[StepResult]:
List[StepResult]: The results of all the steps that ran or were skipped.
"""
step_results = []
build_connector_image_results = await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run()
build_connector_image_results = await BuildConnectorImages(context).run()
if build_connector_image_results.status is StepStatus.FAILURE:
return [build_connector_image_results]
step_results.append(build_connector_image_results)
Expand Down
17 changes: 11 additions & 6 deletions airbyte-ci/connectors/pipelines/pipelines/consts.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,20 @@
"pytest-custom_exit_code",
]

BUILD_PLATFORMS = [Platform("linux/amd64"), Platform("linux/arm64")]
# Human-readable architecture name (as accepted by the `--arch` CLI option)
# mapped to the corresponding Dagger build platform.
BUILD_PLATFORMS = {
    "amd64": Platform("linux/amd64"),
    "arm64": Platform("linux/arm64"),
}

# platform.machine() value mapped to an architecture key of BUILD_PLATFORMS.
PLATFORM_MACHINE_TO_DAGGER_PLATFORM = {
    "x86_64": "amd64",
    "arm64": "arm64",
    # NOTE(review): aarch64 is the Linux name for 64-bit ARM, yet it maps to
    # amd64 here — presumably deliberate for managed CI runners; confirm.
    "aarch64": "amd64",
    "amd64": "amd64",
}
# Architecture key and Dagger platform matching the machine running the CLI.
LOCAL_BUILD_PLATFORM_STRING = PLATFORM_MACHINE_TO_DAGGER_PLATFORM[platform.machine()]
LOCAL_BUILD_PLATFORM = BUILD_PLATFORMS[LOCAL_BUILD_PLATFORM_STRING]

AMAZONCORRETTO_IMAGE = "amazoncorretto:17.0.8-al2023"
NODE_IMAGE = "node:18.18.0-slim"
GO_IMAGE = "golang:1.17"
Expand Down
2 changes: 1 addition & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pipelines"
version = "2.7.0"
version = "2.7.1"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte <contact@airbyte.io>"]

Expand Down
Loading