Skip to content

Commit

Permalink
fix multi arch build (#33008)
Browse files Browse the repository at this point in the history
  • Loading branch information
alafanechere committed Dec 4, 2023
1 parent 161d3e9 commit 91bbdbf
Show file tree
Hide file tree
Showing 9 changed files with 155 additions and 87 deletions.
10 changes: 10 additions & 0 deletions .github/workflows/airbyte-ci-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,11 @@ concurrency:

on:
workflow_dispatch:
inputs:
airbyte_ci_binary_url:
description: "URL to airbyte-ci binary"
required: false
default: https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci
pull_request:
types:
- opened
Expand Down Expand Up @@ -69,6 +74,7 @@ jobs:
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
github_token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
subcommand: "test airbyte-ci/connectors/connector_ops"
airbyte_ci_binary_url: ${{ inputs.airbyte_ci_binary_url || 'https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci' }}

- name: Run airbyte-ci/connectors/pipelines tests
id: run-airbyte-ci-connectors-pipelines-tests
Expand All @@ -82,6 +88,7 @@ jobs:
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
github_token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
subcommand: "test airbyte-ci/connectors/pipelines"
airbyte_ci_binary_url: ${{ inputs.airbyte_ci_binary_url || 'https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci' }}

- name: Run airbyte-ci/connectors/base_images tests
id: run-airbyte-ci-connectors-base-images-tests
Expand All @@ -95,6 +102,7 @@ jobs:
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
github_token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }}
subcommand: "test airbyte-ci/connectors/base_images"
airbyte_ci_binary_url: ${{ inputs.airbyte_ci_binary_url || 'https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci' }}

- name: Run test pipeline for the metadata lib
id: metadata_lib-test-pipeline
Expand All @@ -106,6 +114,7 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
airbyte_ci_binary_url: ${{ inputs.airbyte_ci_binary_url || 'https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci' }}
- name: Run test for the metadata orchestrator
id: metadata_orchestrator-test-pipeline
if: steps.changes.outputs.metadata_orchestrator_any_changed == 'true'
Expand All @@ -116,3 +125,4 @@ jobs:
github_token: ${{ secrets.GITHUB_TOKEN }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
airbyte_ci_binary_url: ${{ inputs.airbyte_ci_binary_url || 'https://connectors.airbyte.com/airbyte-ci/releases/ubuntu/latest/airbyte-ci' }}
7 changes: 6 additions & 1 deletion airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,10 @@ Build a single connector with a custom image tag:
Build a single connector for multiple architectures:
`airbyte-ci connectors --name=source-pokeapi build --architecture=linux/amd64 --architecture=linux/arm64`

You will get:
* `airbyte/source-pokeapi:dev-linux-amd64`
* `airbyte/source-pokeapi:dev-linux-arm64`

Build multiple connectors:
`airbyte-ci connectors --name=source-pokeapi --name=source-bigquery build`

Expand Down Expand Up @@ -305,7 +309,7 @@ flowchart TD

| Option | Multiple | Default value | Description |
| --------------------- | -------- | -------------- | ----------------------------------------------------------------- |
| `--architecture`/`-a` | True | Local platform | Defines for which architecture the connector image will be built. |
| `--architecture`/`-a` | True | Local platform | Defines for which architecture(s) the connector image will be built. |
| `--tag` | False | `dev` | Image tag for the built image. |


Expand Down Expand Up @@ -447,6 +451,7 @@ This command runs the Python tests for a airbyte-ci poetry package.
## Changelog
| Version | PR | Description |
| ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- |
| 2.10.2 | [#33008](https://github.com/airbytehq/airbyte/pull/33008) | Fix local `connector build`. |
| 2.10.1 | [#32928](https://github.com/airbytehq/airbyte/pull/32928) | Fix BuildConnectorImages constructor. |
| 2.10.0 | [#32819](https://github.com/airbytehq/airbyte/pull/32819) | Add `--tag` option to connector build. |
| 2.9.0 | [#32816](https://github.com/airbytehq/airbyte/pull/32816) | Add `--architecture` option to connector build. |
Expand Down
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from __future__ import annotations

import json
from abc import ABC
from typing import List, Optional, Tuple

import docker
from dagger import Container, ExecError, Platform, QueryError
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.helpers.utils import export_containers_to_tarball
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import Step, StepResult, StepStatus


Expand Down Expand Up @@ -61,6 +61,13 @@ def __init__(self, context: ConnectorContext, containers: dict[Platform, Contain
self.image_tag = image_tag
self.containers = containers

def _generate_dev_tag(self, platform: Platform, multi_platforms: bool) -> str:
    """Return the tag to apply to the locally loaded image for a given platform.

    There is no way to load a multi-arch image into a local Docker host, so when
    the user requested multiple architectures each per-platform image gets a
    platform-suffixed tag (e.g. "dev-linux-amd64"); for a single architecture the
    plain ``self.image_tag`` is returned unchanged.

    Args:
        platform (Platform): Build platform of the container, e.g. "linux/amd64"
            (treated as a string: '/' is replaced by '-').
        multi_platforms (bool): True when more than one architecture was built.

    Returns:
        str: The image tag to use when loading this platform's image.
    """
    return f"{self.image_tag}-{platform.replace('/', '-')}" if multi_platforms else self.image_tag

@property
def title(self):
return f"Load {self.image_name}:{self.image_tag} to the local docker host."
Expand All @@ -70,27 +77,28 @@ def image_name(self) -> Tuple:
return f"airbyte/{self.context.connector.technical_name}"

async def _run(self) -> StepResult:
container_variants = list(self.containers.values())
_, exported_tar_path = await export_containers_to_tarball(self.context, container_variants)
if not exported_tar_path:
return StepResult(
self,
StepStatus.FAILURE,
stderr=f"Failed to export the connector image {self.image_name}:{self.image_tag} to a tarball.",
)
try:
client = docker.from_env()
response = client.api.import_image_from_file(str(exported_tar_path), repository=self.image_name, tag=self.image_tag)
try:
image_sha = json.loads(response)["status"]
except (json.JSONDecodeError, KeyError):
loaded_images = []
multi_platforms = len(self.containers) > 1
for platform, container in self.containers.items():
_, exported_tar_path = await export_container_to_tarball(self.context, container, platform)
if not exported_tar_path:
return StepResult(
self,
StepStatus.FAILURE,
stderr=f"Failed to import the connector image {self.image_name}:{self.image_tag} to your Docker host: {response}",
stderr=f"Failed to export the connector image {self.image_name}:{self.image_tag} to a tarball.",
)
return StepResult(
self, StepStatus.SUCCESS, stdout=f"Loaded image {self.image_name}:{self.image_tag} to your Docker host ({image_sha})."
)
except docker.errors.DockerException as e:
return StepResult(self, StepStatus.FAILURE, stderr=f"Something went wrong while interacting with the local docker client: {e}")
try:
client = docker.from_env()
image_tag = self._generate_dev_tag(platform, multi_platforms)
full_image_name = f"{self.image_name}:{image_tag}"
with open(exported_tar_path, "rb") as tarball_content:
new_image = client.images.load(tarball_content.read())[0]
new_image.tag(self.image_name, tag=image_tag)
image_sha = new_image.id
loaded_images.append(full_image_name)
except docker.errors.DockerException as e:
return StepResult(
self, StepStatus.FAILURE, stderr=f"Something went wrong while interacting with the local docker client: {e}"
)

return StepResult(self, StepStatus.SUCCESS, stdout=f"Loaded image {','.join(loaded_images)} to your Docker host ({image_sha}).")
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.actions import secrets
from pipelines.dagger.actions.system import docker
from pipelines.helpers.utils import export_containers_to_tarball
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import StepResult, StepStatus


Expand Down Expand Up @@ -101,15 +101,18 @@ async def run_docker_build_dependent_steps(dist_tar_dir: Directory) -> List[Step
context.logger.info(f"This connector supports normalization: will build {normalization_image}.")
build_normalization_results = await BuildOrPullNormalization(context, normalization_image, LOCAL_BUILD_PLATFORM).run()
normalization_container = build_normalization_results.output_artifact
normalization_tar_file, _ = await export_containers_to_tarball(
context, [normalization_container], tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
normalization_tar_file, _ = await export_container_to_tarball(
context,
normalization_container,
LOCAL_BUILD_PLATFORM,
tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar",
)
step_results.append(build_normalization_results)
else:
normalization_tar_file = None

connector_container = build_connector_image_results.output_artifact[LOCAL_BUILD_PLATFORM]
connector_image_tar_file, _ = await export_containers_to_tarball(context, [connector_container])
connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container, LOCAL_BUILD_PLATFORM)

async with asyncer.create_task_group() as docker_build_dependent_group:
soon_integration_tests_results = docker_build_dependent_group.soonify(IntegrationTests(context).run)(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ async def test(pipeline_context: ClickPipelineContext):
.with_workdir(f"/airbyte/{poetry_package_path}")
.with_exec(["poetry", "install"])
.with_unix_socket("/var/run/docker.sock", dagger_client.host().unix_socket("/var/run/docker.sock"))
.with_env_variable("CI", str(pipeline_context.params["is_ci"]))
.with_exec(["poetry", "run", "pytest", test_directory])
)

Expand Down
22 changes: 12 additions & 10 deletions airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
import anyio
import asyncer
import click
from dagger import Client, Config, Container, ExecError, File, ImageLayerCompression, QueryError, Secret
from dagger import Client, Config, Container, ExecError, File, ImageLayerCompression, Platform, QueryError, Secret
from more_itertools import chunked

if TYPE_CHECKING:
Expand Down Expand Up @@ -267,28 +267,30 @@ async def execute_concurrently(steps: List[Callable], concurrency=5):
return [task.value for task in tasks]


async def export_containers_to_tarball(
context: ConnectorContext, container_variants: List[Container], tar_file_name: Optional[str] = None
async def export_container_to_tarball(
context: ConnectorContext, container: Container, platform: Platform, tar_file_name: Optional[str] = None
) -> Tuple[Optional[File], Optional[Path]]:
"""Save the container image to the host filesystem as a tar archive.
Exports a list of container variants to a tarball file.
The list of container variants should be platform/os specific variants of the same container image.
Exports a container to a tarball file.
The tarball file is saved to the host filesystem in the directory specified by the host_image_export_dir_path attribute of the context.
Args:
context (ConnectorContext): The current connector context.
container_variants (List[Container]): The list of container variants to export.
container (Container): The container to export.
platform (Platform): The platform of the container to export.
tar_file_name (Optional[str], optional): The name of the tar archive file. Defaults to None.
Returns:
Tuple[Optional[File], Optional[Path]]: A tuple with the file object holding the tar archive on the host and its path.
"""
tar_file_name = f"{slugify(context.connector.technical_name)}_{context.git_revision}.tar" if tar_file_name is None else tar_file_name
local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}")
export_success = await context.dagger_client.container().export(
str(local_path), platform_variants=container_variants, forced_compression=ImageLayerCompression.Gzip
tar_file_name = (
f"{slugify(context.connector.technical_name)}_{context.git_revision}_{platform.replace('/', '_')}.tar"
if tar_file_name is None
else tar_file_name
)
local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}")
export_success = await container.export(str(local_path), forced_compression=ImageLayerCompression.Gzip)
if export_success:
return context.dagger_client.host().file(str(local_path)), local_path
return None, None
Expand Down
4 changes: 3 additions & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pipelines"
version = "2.10.1"
version = "2.10.2"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte <contact@airbyte.io>"]

Expand Down Expand Up @@ -37,6 +37,8 @@ freezegun = "^1.2.2"
pytest-cov = "^4.1.0"
pyinstaller = "^6.1.0"
poethepoet = "^0.24.2"
pytest = "^6.2.5"
pytest-mock = "^3.10.0"

[tool.poetry.scripts]
airbyte-ci = "pipelines.cli.airbyte_ci:airbyte_ci"
Expand Down
Loading

0 comments on commit 91bbdbf

Please sign in to comment.