connectors-ci: sanitize the publish logic and write tests for it (#26068)
alafanechere committed May 15, 2023
1 parent eff127e commit be6dbae
Showing 11 changed files with 576 additions and 212 deletions.
@@ -10,32 +10,20 @@
import uuid
from typing import TYPE_CHECKING, List, Optional, Tuple

from ci_connector_ops.pipelines.consts import (
    CI_CONNECTOR_OPS_SOURCE_PATH,
    CI_CREDENTIALS_SOURCE_PATH,
    CONNECTOR_TESTING_REQUIREMENTS,
    DEFAULT_PYTHON_EXCLUDE,
    PYPROJECT_TOML_FILE_PATH,
)
from ci_connector_ops.pipelines.utils import get_file_contents, slugify, with_exit_code
from dagger import CacheSharingMode, CacheVolume, Container, Directory, File, Platform, Secret

if TYPE_CHECKING:
    from ci_connector_ops.pipelines.contexts import ConnectorContext, PipelineContext


PYPROJECT_TOML_FILE_PATH = "pyproject.toml"

CONNECTOR_TESTING_REQUIREMENTS = [
    "pip==21.3.1",
    "mccabe==0.6.1",
    "flake8==4.0.1",
    "pyproject-flake8==0.0.1a2",
    "black==22.3.0",
    "isort==5.6.4",
    "pytest==6.2.5",
    "coverage[toml]==6.3.1",
    "pytest-custom_exit_code",
]

DEFAULT_PYTHON_EXCLUDE = ["**/.venv", "**/__pycache__"]
CI_CREDENTIALS_SOURCE_PATH = "tools/ci_credentials"
CI_CONNECTOR_OPS_SOURCE_PATH = "tools/ci_connector_ops"


def with_python_base(context: PipelineContext, python_image_name: str = "python:3.9-slim") -> Container:
"""Build a Python container with a cache volume for pip cache.
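The hunk above touches with_python_base, which builds a Python container backed by a pip cache volume via the Dagger Python SDK. The snippet below is a minimal, standalone sketch of that pattern, not the function's actual body: the cache volume name "pip_cache", the mount path "/root/.cache/pip", and the example commands are assumptions made for illustration.

# Illustrative sketch only, not part of this commit. Assumes the dagger Python SDK;
# the cache volume name and mount path are invented example values.
import anyio
import dagger


async def python_base_sketch(python_image_name: str = "python:3.9-slim") -> None:
    async with dagger.Connection(dagger.Config()) as client:
        pip_cache = client.cache_volume("pip_cache")  # named cache volume reused across pipeline runs
        container = (
            client.container()
            .from_(python_image_name)
            .with_mounted_cache("/root/.cache/pip", pip_cache)
            .with_exec(["pip", "install", "--upgrade", "pip"])
        )
        print(await container.with_exec(["python", "--version"]).stdout())


anyio.run(python_base_sketch)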
141 changes: 6 additions & 135 deletions tools/ci_connector_ops/ci_connector_ops/pipelines/bases.py
@@ -11,13 +11,13 @@
from dataclasses import dataclass, field
from datetime import datetime
from enum import Enum
from typing import TYPE_CHECKING, Any, ClassVar, List, Optional, Tuple
from typing import TYPE_CHECKING, Any, ClassVar, List, Optional

import asyncer
from ci_connector_ops.pipelines.actions import environments
from ci_connector_ops.pipelines.utils import check_path_in_workdir, slugify, with_exit_code, with_stderr, with_stdout
from ci_connector_ops.utils import Connector, console
from dagger import CacheVolume, Container, Directory, QueryError
from ci_connector_ops.pipelines.consts import PYPROJECT_TOML_FILE_PATH
from ci_connector_ops.pipelines.utils import check_path_in_workdir, with_exit_code, with_stderr, with_stdout
from ci_connector_ops.utils import console
from dagger import Container, QueryError
from rich.console import Group
from rich.panel import Panel
from rich.style import Style
@@ -168,9 +168,7 @@ async def _run_tests_in_directory(self, connector_under_test: Container, test_di
        Returns:
            Tuple[StepStatus, Optional[str], Optional[str]]: Tuple of StepStatus, stderr and stdout.
        """
        test_config = (
            "pytest.ini" if await check_path_in_workdir(connector_under_test, "pytest.ini") else "/" + environments.PYPROJECT_TOML_FILE_PATH
        )
        test_config = "pytest.ini" if await check_path_in_workdir(connector_under_test, "pytest.ini") else "/" + PYPROJECT_TOML_FILE_PATH
        if await check_path_in_workdir(connector_under_test, test_directory):
            tester = connector_under_test.with_exec(
                [
@@ -365,130 +363,3 @@ def print(self):

        main_panel = Panel(Group(*to_render), title=main_panel_title, subtitle=duration_subtitle)
        console.print(main_panel)


class GradleTask(Step, ABC):
    """
    A step to run a Gradle task.
    Attributes:
        task_name (str): The Gradle task name to run.
        title (str): The step title.
    """

    DEFAULT_TASKS_TO_EXCLUDE = ["airbyteDocker"]
    BIND_TO_DOCKER_HOST = True
    gradle_task_name: ClassVar

    # TODO more robust way to find all projects on which the task depends?
    JAVA_BUILD_INCLUDE = [
        "airbyte-api",
        "airbyte-commons-cli",
        "airbyte-commons-protocol",
        "airbyte-commons",
        "airbyte-config",
        "airbyte-connector-test-harnesses",
        "airbyte-db",
        "airbyte-integrations/bases",
        "airbyte-json-validation",
        "airbyte-protocol",
        "airbyte-test-utils",
        "airbyte-config-oss",
    ]

    SOURCE_BUILD_INCLUDE = [
        "airbyte-integrations/connectors/source-jdbc",
        "airbyte-integrations/connectors/source-relational-db",
    ]

    DESTINATION_BUILD_INCLUDE = [
        "airbyte-integrations/bases/bases-destination-jdbc",
        "airbyte-integrations/connectors/destination-gcs",
        "airbyte-integrations/connectors/destination-azure-blob-storage",
    ]

    # These are the lines we remove from the connector gradle file to ignore specific tasks / plugins.
    LINES_TO_REMOVE_FROM_GRADLE_FILE = [
        # Do not build normalization with Gradle - we build normalization with Dagger in the BuildOrPullNormalization step.
        "project(':airbyte-integrations:bases:base-normalization').airbyteDocker.output",
    ]

    @property
    def docker_service_name(self) -> str:
        return slugify(f"gradle-{self.title}")

    @property
    def connector_java_build_cache(self) -> CacheVolume:
        return self.context.dagger_client.cache_volume("connector_java_build_cache")

    def get_related_connectors(self) -> List[Connector]:
        """Retrieve the list of related connectors.
        This is used to include source code of non strict-encrypt connectors when running build for a strict-encrypt connector.
        Returns:
            List[Connector]: The list of related connectors.
        """
        if self.context.connector.technical_name.endswith("-strict-encrypt"):
            return [Connector(self.context.connector.technical_name.replace("-strict-encrypt", ""))]
        if self.context.connector.technical_name == "source-file-secure":
            return [Connector("source-file")]
        return []

    @property
    def build_include(self) -> List[str]:
        """Retrieve the list of source code directory required to run a Java connector Gradle task.
        The list is different according to the connector type.
        Returns:
            List[str]: List of directories or files to be mounted to the container to run a Java connector Gradle task.
        """
        to_include = self.JAVA_BUILD_INCLUDE

        if self.context.connector.connector_type == "source":
            to_include += self.SOURCE_BUILD_INCLUDE
        elif self.context.connector.connector_type == "destination":
            to_include += self.DESTINATION_BUILD_INCLUDE
        else:
            raise ValueError(f"{self.context.connector.connector_type} is not supported")

        with_related_connectors_source_code = to_include + [str(connector.code_directory) for connector in self.get_related_connectors()]
        return with_related_connectors_source_code

    async def _get_patched_connector_dir(self) -> Directory:
        """Patch the build.gradle file of the connector under test by removing the lines declared in LINES_TO_REMOVE_FROM_GRADLE_FILE.
        Returns:
            Directory: The patched connector directory
        """

        gradle_file_content = await self.context.get_connector_dir(include=["build.gradle"]).file("build.gradle").contents()
        patched_file_content = ""
        for line in gradle_file_content.split("\n"):
            if not any(line_to_remove in line for line_to_remove in self.LINES_TO_REMOVE_FROM_GRADLE_FILE):
                patched_file_content += line + "\n"
        return self.context.get_connector_dir().with_new_file("build.gradle", patched_file_content)

    def _get_gradle_command(self, extra_options: Tuple[str] = ("--no-daemon", "--scan")) -> List:
        command = (
            ["./gradlew"]
            + list(extra_options)
            + [f":airbyte-integrations:connectors:{self.context.connector.technical_name}:{self.gradle_task_name}"]
        )
        for task in self.DEFAULT_TASKS_TO_EXCLUDE:
            command += ["-x", task]
        return command

    async def _run(self) -> StepResult:
        connector_under_test = (
            environments.with_gradle(
                self.context, self.build_include, docker_service_name=self.docker_service_name, bind_to_docker_host=self.BIND_TO_DOCKER_HOST
            )
            .with_mounted_directory(str(self.context.connector.code_directory), await self._get_patched_connector_dir())
            # Disable the Ryuk container because it needs privileged docker access that does not work:
            .with_env_variable("TESTCONTAINERS_RYUK_DISABLED", "true")
            .with_directory(f"{self.context.connector.code_directory}/secrets", self.context.secrets_dir)
            .with_exec(self._get_gradle_command())
        )

        return await self.get_step_result(connector_under_test)
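This GradleTask class is removed from bases.py and now lives in ci_connector_ops.pipelines.gradle (see the import change in the java_connectors.py hunk below). To make the command assembly easier to follow, here is a self-contained sketch mirroring _get_gradle_command above; the connector name "source-faker" and task "distTar" are hypothetical example values chosen for illustration, not values from this commit.

# Standalone mirror of the command assembly shown in _get_gradle_command above.
# "source-faker" and "distTar" are hypothetical example values.
from typing import List, Tuple

DEFAULT_TASKS_TO_EXCLUDE = ["airbyteDocker"]


def get_gradle_command(technical_name: str, gradle_task_name: str, extra_options: Tuple[str, ...] = ("--no-daemon", "--scan")) -> List[str]:
    command = (
        ["./gradlew"]
        + list(extra_options)
        + [f":airbyte-integrations:connectors:{technical_name}:{gradle_task_name}"]
    )
    for task in DEFAULT_TASKS_TO_EXCLUDE:
        command += ["-x", task]
    return command


print(get_gradle_command("source-faker", "distTar"))
# ['./gradlew', '--no-daemon', '--scan', ':airbyte-integrations:connectors:source-faker:distTar', '-x', 'airbyteDocker']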
@@ -3,11 +3,13 @@
#
"""This module groups factory like functions to dispatch builds steps according to the connector language."""

from __future__ import annotations

import platform
from typing import Optional, Tuple
from typing import TYPE_CHECKING, Optional, Tuple

import anyio
from ci_connector_ops.pipelines.bases import ConnectorReport, StepResult
from ci_connector_ops.pipelines.bases import ConnectorReport
from ci_connector_ops.pipelines.builds import common, java_connectors, python_connectors
from ci_connector_ops.pipelines.contexts import ConnectorContext
from ci_connector_ops.utils import ConnectorLanguage
@@ -16,6 +18,9 @@
BUILD_PLATFORMS = [Platform("linux/amd64"), Platform("linux/arm64")]
LOCAL_BUILD_PLATFORM = Platform(f"linux/{platform.machine()}")

if TYPE_CHECKING:
    from ci_connector_ops.pipelines.bases import StepResult


class NoBuildStepForLanguageError(Exception):
    pass
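The module docstring in this hunk describes factory-like functions that dispatch build steps by connector language. The sketch below is a hypothetical, self-contained illustration of such a dispatch: the enum values, step classes, and mapping are stand-ins invented for the example, not the classes actually defined in java_connectors or python_connectors.

# Hypothetical dispatch sketch; every name below is a stand-in, not a real pipeline class.
from enum import Enum


class ConnectorLanguage(Enum):  # stand-in for ci_connector_ops.utils.ConnectorLanguage
    PYTHON = "python"
    JAVA = "java"


class NoBuildStepForLanguageError(Exception):
    pass


class BuildPythonConnectorImage:  # stand-in build step
    ...


class BuildJavaConnectorImage:  # stand-in build step
    ...


LANGUAGE_TO_BUILD_STEP = {
    ConnectorLanguage.PYTHON: BuildPythonConnectorImage,
    ConnectorLanguage.JAVA: BuildJavaConnectorImage,
}


def get_build_step_class(language: ConnectorLanguage):
    try:
        return LANGUAGE_TO_BUILD_STEP[language]
    except KeyError:
        raise NoBuildStepForLanguageError(f"No build step for language: {language}") from None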
@@ -4,8 +4,9 @@


from ci_connector_ops.pipelines.actions import environments
from ci_connector_ops.pipelines.bases import GradleTask, StepResult, StepStatus
from ci_connector_ops.pipelines.bases import StepResult, StepStatus
from ci_connector_ops.pipelines.builds.common import BuildConnectorImageBase
from ci_connector_ops.pipelines.gradle import GradleTask
from dagger import File, QueryError


20 changes: 20 additions & 0 deletions tools/ci_connector_ops/ci_connector_ops/pipelines/consts.py
@@ -0,0 +1,20 @@
#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
PYPROJECT_TOML_FILE_PATH = "pyproject.toml"

CONNECTOR_TESTING_REQUIREMENTS = [
    "pip==21.3.1",
    "mccabe==0.6.1",
    "flake8==4.0.1",
    "pyproject-flake8==0.0.1a2",
    "black==22.3.0",
    "isort==5.6.4",
    "pytest==6.2.5",
    "coverage[toml]==6.3.1",
    "pytest-custom_exit_code",
]

DEFAULT_PYTHON_EXCLUDE = ["**/.venv", "**/__pycache__"]
CI_CREDENTIALS_SOURCE_PATH = "tools/ci_credentials"
CI_CONNECTOR_OPS_SOURCE_PATH = "tools/ci_connector_ops"

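With the constants centralized in the new consts.py above, the other pipeline modules import them instead of redefining them per file, as the first hunk of this diff shows. A minimal usage sketch:

# Minimal usage sketch: import the shared constants instead of redefining them in each module.
from ci_connector_ops.pipelines.consts import (
    CONNECTOR_TESTING_REQUIREMENTS,
    PYPROJECT_TOML_FILE_PATH,
)

print(PYPROJECT_TOML_FILE_PATH)           # "pyproject.toml"
print(CONNECTOR_TESTING_REQUIREMENTS[0])  # "pip==21.3.1"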
