Skip to content

Commit

Permalink
fix multi arch build
Browse files Browse the repository at this point in the history
  • Loading branch information
alafanechere committed Dec 1, 2023
1 parent c8bf267 commit b81816b
Show file tree
Hide file tree
Showing 7 changed files with 122 additions and 79 deletions.
7 changes: 6 additions & 1 deletion airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,10 @@ Build a single connector with a custom image tag:
Build a single connector for multiple architectures:
`airbyte-ci connectors --name=source-pokeapi build --architecture=linux/amd64 --architecture=linux/arm64`

You will get:
* `airbyte/source-pokeapi:dev-linux-amd64`
* `airbyte/source-pokeapi:dev-linux-arm64`

Build multiple connectors:
`airbyte-ci connectors --name=source-pokeapi --name=source-bigquery build`

Expand Down Expand Up @@ -305,7 +309,7 @@ flowchart TD

| Option | Multiple | Default value | Description |
| --------------------- | -------- | -------------- | ----------------------------------------------------------------- |
| `--architecture`/`-a` | True | Local platform | Defines for which architecture the connector image will be built. |
| `--architecture`/`-a` | True | Local platform | Defines for which architecture(s) the connector image will be built. |
| `--tag` | False | `dev` | Image tag for the built image. |


Expand Down Expand Up @@ -447,6 +451,7 @@ This command runs the Python tests for a airbyte-ci poetry package.
## Changelog
| Version | PR | Description |
| ------- | ---------------------------------------------------------- | --------------------------------------------------------------------------------------------------------- |
| 2.10.2 | [#33008](https://github.com/airbytehq/airbyte/pull/33008) | Fix local `connector build`. |
| 2.10.1 | [#32928](https://github.com/airbytehq/airbyte/pull/32928) | Fix BuildConnectorImages constructor. |
| 2.10.0 | [#32819](https://github.com/airbytehq/airbyte/pull/32819) | Add `--tag` option to connector build. |
| 2.9.0 | [#32816](https://github.com/airbytehq/airbyte/pull/32816) | Add `--architecture` option to connector build. |
Expand Down
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from __future__ import annotations

import json
from abc import ABC
from typing import List, Optional, Tuple

import docker
from dagger import Container, ExecError, Platform, QueryError
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.helpers.utils import export_containers_to_tarball
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import Step, StepResult, StepStatus


Expand Down Expand Up @@ -70,27 +70,28 @@ def image_name(self) -> Tuple:
return f"airbyte/{self.context.connector.technical_name}"

async def _run(self) -> StepResult:
container_variants = list(self.containers.values())
_, exported_tar_path = await export_containers_to_tarball(self.context, container_variants)
if not exported_tar_path:
return StepResult(
self,
StepStatus.FAILURE,
stderr=f"Failed to export the connector image {self.image_name}:{self.image_tag} to a tarball.",
)
try:
client = docker.from_env()
response = client.api.import_image_from_file(str(exported_tar_path), repository=self.image_name, tag=self.image_tag)
try:
image_sha = json.loads(response)["status"]
except (json.JSONDecodeError, KeyError):
loaded_images = []
multi_platforms = len(self.containers) > 1
for platform, container in self.containers.items():
_, exported_tar_path = await export_container_to_tarball(self.context, container, platform)
if not exported_tar_path:
return StepResult(
self,
StepStatus.FAILURE,
stderr=f"Failed to import the connector image {self.image_name}:{self.image_tag} to your Docker host: {response}",
stderr=f"Failed to export the connector image {self.image_name}:{self.image_tag} to a tarball.",
)
return StepResult(
self, StepStatus.SUCCESS, stdout=f"Loaded image {self.image_name}:{self.image_tag} to your Docker host ({image_sha})."
)
except docker.errors.DockerException as e:
return StepResult(self, StepStatus.FAILURE, stderr=f"Something went wrong while interacting with the local docker client: {e}")
try:
client = docker.from_env()
image_tag = f"{self.image_tag}-{platform.replace('/', '-')}" if multi_platforms else self.image_tag
full_image_name = f"{self.image_name}:{image_tag}"
with open(exported_tar_path, "rb") as tarball_content:
new_image = client.images.load(tarball_content.read())[0]
new_image.tag(self.image_name, tag=image_tag)
image_sha = new_image.id
loaded_images.append(full_image_name)
except docker.errors.DockerException as e:
return StepResult(
self, StepStatus.FAILURE, stderr=f"Something went wrong while interacting with the local docker client: {e}"
)

return StepResult(self, StepStatus.SUCCESS, stdout=f"Loaded image {','.join(loaded_images)} to your Docker host ({image_sha}).")
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.actions import secrets
from pipelines.dagger.actions.system import docker
from pipelines.helpers.utils import export_containers_to_tarball
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import StepResult, StepStatus


Expand Down Expand Up @@ -101,15 +101,18 @@ async def run_docker_build_dependent_steps(dist_tar_dir: Directory) -> List[Step
context.logger.info(f"This connector supports normalization: will build {normalization_image}.")
build_normalization_results = await BuildOrPullNormalization(context, normalization_image, LOCAL_BUILD_PLATFORM).run()
normalization_container = build_normalization_results.output_artifact
normalization_tar_file, _ = await export_containers_to_tarball(
context, [normalization_container], tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
normalization_tar_file, _ = await export_container_to_tarball(
context,
normalization_container,
LOCAL_BUILD_PLATFORM,
tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar",
)
step_results.append(build_normalization_results)
else:
normalization_tar_file = None

connector_container = build_connector_image_results.output_artifact[LOCAL_BUILD_PLATFORM]
connector_image_tar_file, _ = await export_containers_to_tarball(context, [connector_container])
connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container, LOCAL_BUILD_PLATFORM)

async with asyncer.create_task_group() as docker_build_dependent_group:
soon_integration_tests_results = docker_build_dependent_group.soonify(IntegrationTests(context).run)(
Expand Down
20 changes: 10 additions & 10 deletions airbyte-ci/connectors/pipelines/pipelines/helpers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
import anyio
import asyncer
import click
from dagger import Client, Config, Container, ExecError, File, ImageLayerCompression, QueryError, Secret
from dagger import Client, Config, Container, ExecError, File, ImageLayerCompression, Platform, QueryError, Secret
from more_itertools import chunked

if TYPE_CHECKING:
Expand Down Expand Up @@ -267,28 +267,28 @@ async def execute_concurrently(steps: List[Callable], concurrency=5):
return [task.value for task in tasks]


async def export_containers_to_tarball(
context: ConnectorContext, container_variants: List[Container], tar_file_name: Optional[str] = None
async def export_container_to_tarball(
context: ConnectorContext, container: Container, platform: Platform, tar_file_name: Optional[str] = None
) -> Tuple[Optional[File], Optional[Path]]:
"""Save the container image to the host filesystem as a tar archive.
Exports a list of container variants to a tarball file.
The list of container variants should be platform/os specific variants of the same container image.
Exports a container to a tarball file.
The tarball file is saved to the host filesystem in the directory specified by the host_image_export_dir_path attribute of the context.
Args:
context (ConnectorContext): The current connector context.
container_variants (List[Container]): The list of container variants to export.
container (Container): The container to export.
platform (Platform): The platform of the container to export.
tar_file_name (Optional[str], optional): The name of the tar archive file. Defaults to None.
Returns:
Tuple[Optional[File], Optional[Path]]: A tuple with the file object holding the tar archive on the host and its path.
"""
tar_file_name = f"{slugify(context.connector.technical_name)}_{context.git_revision}.tar" if tar_file_name is None else tar_file_name
local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}")
export_success = await context.dagger_client.container().export(
str(local_path), platform_variants=container_variants, forced_compression=ImageLayerCompression.Gzip
tar_file_name = (
f"{slugify(context.connector.technical_name)}_{context.git_revision}_{platform}.tar" if tar_file_name is None else tar_file_name
)
local_path = Path(f"{context.host_image_export_dir_path}/{tar_file_name}")
export_success = await container.export(str(local_path), forced_compression=ImageLayerCompression.Gzip)
if export_success:
return context.dagger_client.host().file(str(local_path)), local_path
return None, None
Expand Down
2 changes: 1 addition & 1 deletion airbyte-ci/connectors/pipelines/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "pipelines"
version = "2.10.1"
version = "2.10.2"
description = "Packaged maintained by the connector operations team to perform CI for connectors' pipelines"
authors = ["Airbyte <contact@airbyte.io>"]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,12 @@
#

import os
from typing import Dict

import dagger
import docker
import pytest
from pipelines.airbyte_ci.connectors.build_image.steps import common
from pipelines.consts import BUILD_PLATFORMS
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.models.steps import StepStatus

pytestmark = [
Expand All @@ -20,29 +19,22 @@
@pytest.mark.slow
class TestLoadContainerToLocalDockerHost:
@pytest.fixture(scope="class")
def certified_connector(self, all_connectors):
def faker_connector(self, all_connectors):
for connector in all_connectors:
if connector.support_level == "certified":
if connector.technical_name == "source-faker":
return connector
pytest.skip("No certified connector found")
pytest.fail("Could not find the source-faker connector.")

@pytest.fixture
def built_containers(self, dagger_client, certified_connector) -> Dict[dagger.Platform, dagger.Container]:
return {
platform: dagger_client.container(platform=platform).from_(f'{certified_connector.metadata["dockerRepository"]}:latest')
for platform in BUILD_PLATFORMS
}

@pytest.fixture
def test_context(self, mocker, dagger_client, certified_connector, tmp_path):
def test_context(self, mocker, dagger_client, faker_connector, tmp_path):
return mocker.Mock(
secrets_to_mask=[], dagger_client=dagger_client, connector=certified_connector, host_image_export_dir_path=tmp_path
secrets_to_mask=[],
dagger_client=dagger_client,
connector=faker_connector,
host_image_export_dir_path=tmp_path,
git_revision="test-revision",
)

@pytest.fixture
def step(self, test_context, built_containers):
return common.LoadContainerToLocalDockerHost(test_context, built_containers)

@pytest.fixture
def bad_docker_host(self):
original_docker_host = os.environ.get("DOCKER_HOST")
Expand All @@ -52,39 +44,82 @@ def bad_docker_host(self):
else:
del os.environ["DOCKER_HOST"]

async def test_run(self, test_context, step):
@pytest.mark.parametrize(
"platforms",
[
[dagger.Platform("linux/arm64")],
[dagger.Platform("linux/amd64")],
[dagger.Platform("linux/amd64"), dagger.Platform("linux/arm64")],
],
)
async def test_run(self, dagger_client, test_context, platforms):
"""Test that the step runs successfully and that the image is loaded in the local docker host."""
built_containers = {
platform: dagger_client.container(platform=platform).from_(f'{test_context.connector.metadata["dockerRepository"]}:latest')
for platform in platforms
}
step = common.LoadContainerToLocalDockerHost(test_context, built_containers)

assert step.image_tag == "dev"
docker_client = docker.from_env()
step.image_tag = "test-load-container"
try:
docker_client.images.remove(f"{test_context.connector.metadata['dockerRepository']}:{step.image_tag}")
except docker.errors.ImageNotFound:
pass
for platform in platforms:
full_image_name = f"{test_context.connector.metadata['dockerRepository']}:{step.image_tag}-{platform.replace('/', '-')}"
try:
docker_client.images.remove(full_image_name, force=True)
except docker.errors.ImageNotFound:
pass
result = await step.run()
assert result.status is StepStatus.SUCCESS
docker_client.images.get(f"{test_context.connector.metadata['dockerRepository']}:{step.image_tag}")
docker_client.images.remove(f"{test_context.connector.metadata['dockerRepository']}:{step.image_tag}")
multi_platforms = len(platforms) > 1
for platform in platforms:
if multi_platforms:
full_image_name = f"{test_context.connector.metadata['dockerRepository']}:{step.image_tag}-{platform.replace('/', '-')}"
else:
full_image_name = f"{test_context.connector.metadata['dockerRepository']}:{step.image_tag}"
docker_client.images.get(full_image_name)
docker_client.containers.run(full_image_name, "spec")
docker_client.images.remove(full_image_name, force=True)

async def test_run_export_failure(self, step, mocker):
async def test_run_export_failure(self, dagger_client, test_context, mocker):
"""Test that the step fails if the export of the container fails."""
mocker.patch.object(common, "export_containers_to_tarball", return_value=(None, None))
built_containers = {
LOCAL_BUILD_PLATFORM: dagger_client.container(platform=LOCAL_BUILD_PLATFORM).from_(
f'{test_context.connector.metadata["dockerRepository"]}:latest'
)
}
step = common.LoadContainerToLocalDockerHost(test_context, built_containers)

mocker.patch.object(common, "export_container_to_tarball", return_value=(None, None))
result = await step.run()
assert result.status is StepStatus.FAILURE
assert "Failed to export the connector image" in result.stderr

async def test_run_connection_error(self, step, bad_docker_host):
async def test_run_connection_error(self, dagger_client, test_context, bad_docker_host):
"""Test that the step fails if the connection to the docker host fails."""
built_containers = {
LOCAL_BUILD_PLATFORM: dagger_client.container(platform=LOCAL_BUILD_PLATFORM).from_(
f'{test_context.connector.metadata["dockerRepository"]}:latest'
)
}
step = common.LoadContainerToLocalDockerHost(test_context, built_containers)
os.environ["DOCKER_HOST"] = bad_docker_host
result = await step.run()
assert result.status is StepStatus.FAILURE
assert "Something went wrong while interacting with the local docker client" in result.stderr

async def test_run_import_failure(self, step, mocker):
async def test_run_import_failure(self, dagger_client, test_context, mocker):
"""Test that the step fails if the docker import of the tar fails."""
built_containers = {
LOCAL_BUILD_PLATFORM: dagger_client.container(platform=LOCAL_BUILD_PLATFORM).from_(
f'{test_context.connector.metadata["dockerRepository"]}:latest'
)
}
step = common.LoadContainerToLocalDockerHost(test_context, built_containers)
mock_docker_client = mocker.MagicMock()
mock_docker_client.api.import_image_from_file.return_value = "bad response"
mock_docker_client.images.load.side_effect = docker.errors.DockerException("test error")
mocker.patch.object(common.docker, "from_env", return_value=mock_docker_client)
result = await step.run()
assert result.status is StepStatus.FAILURE
assert "Failed to import the connector image" in result.stderr
assert "Something went wrong while interacting with the local docker client: test error" in result.stderr
Loading

0 comments on commit b81816b

Please sign in to comment.