Skip to content

Commit

Permalink
airbyte-ci: connector test steps can take extra parameters from CLI (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
alafanechere committed Jan 15, 2024
1 parent c404899 commit 8d27b6b
Show file tree
Hide file tree
Showing 21 changed files with 305 additions and 52 deletions.
14 changes: 14 additions & 0 deletions airbyte-ci/connectors/pipelines/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -264,11 +264,24 @@ flowchart TD
| `--fail-fast` | False | False | Abort after any tests fail, rather than continuing to run additional tests. Use this setting to confirm a known bug is fixed (or not), or when you only require a pass/fail result. |
| `--code-tests-only` | True | False | Skip any tests not directly related to code updates. For instance, metadata checks, version bump checks, changelog verification, etc. Use this setting to help focus on code quality during development. |
| `--concurrent-cat` | False | False | Make CAT tests run concurrently using pytest-xdist. Be careful about source or destination API rate limits. |
| `--<step-id>.<extra-parameter>=<extra-parameter-value>` | True | | You can pass extra parameters to specific test steps. See the extra parameters section below for more details. |

Note:

- The above options are implemented for Java connectors but may not be available for Python connectors. If an option is not supported, the pipeline will not fail but instead the 'default' behavior will be executed.

#### Extra parameters
You can pass extra parameters to the following steps:
* `unit`
* `integration`
* `acceptance`

This allows you to override the default parameters of these steps.
For example, you can run only the `test_read` test of the acceptance test suite with:
`airbyte-ci connectors --name=source-pokeapi test --acceptance.-k=test_read`
Here the `-k` parameter is passed to the pytest command running acceptance tests.
Please keep in mind that extra parameters are not validated by the CLI: if you pass an invalid parameter, the failure will only surface later, during pipeline execution.

### <a id="connectors-build-command"></a>`connectors build` command

Run a build pipeline for one or multiple connectors and export the built docker image to the local docker host.
Expand Down Expand Up @@ -521,6 +534,7 @@ E.G.: running `pytest` on a specific test folder:

| Version | PR | Description |
| ------- | ---------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- |
| 3.2.0 | [#34050](https://github.com/airbytehq/airbyte/pull/34050) | Connector test steps can take extra parameters |
| 3.1.3 | [#34136](https://github.com/airbytehq/airbyte/pull/34136) | Fix issue where dagger excludes were not being properly applied |
| 3.1.2 | [#33972](https://github.com/airbytehq/airbyte/pull/33972) | Remove secrets scrubbing hack for --is-local and other small tweaks. |
| 3.1.1 | [#33979](https://github.com/airbytehq/airbyte/pull/33979) | Fix AssertionError on report existence again |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
from pipelines.consts import BUILD_PLATFORMS
from pipelines.dagger.actions import secrets
from pipelines.helpers.connectors.modifed import ConnectorWithModifiedFiles
from pipelines.helpers.execution.run_steps import RunStepOptions
from pipelines.helpers.github import update_commit_status_check
from pipelines.helpers.run_steps import RunStepOptions
from pipelines.helpers.slack import send_message_to_webhook
from pipelines.helpers.utils import METADATA_FILE_NAME
from pipelines.models.contexts.pipeline_context import PipelineContext
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#

import sys
from typing import List
from typing import Dict, List

import asyncclick as click
from pipelines import main_logger
Expand All @@ -13,12 +13,20 @@
from pipelines.airbyte_ci.connectors.test.pipeline import run_connector_test_pipeline
from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
from pipelines.consts import LOCAL_BUILD_PLATFORM, ContextState
from pipelines.helpers.execution import argument_parsing
from pipelines.helpers.execution.run_steps import RunStepOptions
from pipelines.helpers.github import update_global_commit_status_check_for_tests
from pipelines.helpers.run_steps import RunStepOptions
from pipelines.helpers.utils import fail_if_missing_docker_hub_creds
from pipelines.models.steps import STEP_PARAMS


@click.command(cls=DaggerPipelineCommand, help="Test all the selected connectors.")
@click.command(
cls=DaggerPipelineCommand,
help="Test all the selected connectors.",
context_settings=dict(
ignore_unknown_options=True,
),
)
@click.option(
"--code-tests-only",
is_flag=True,
Expand Down Expand Up @@ -47,13 +55,17 @@
type=click.Choice([step_id.value for step_id in CONNECTOR_TEST_STEP_ID]),
help="Skip a step by name. Can be used multiple times to skip multiple steps.",
)
@click.argument(
"extra_params", nargs=-1, type=click.UNPROCESSED, callback=argument_parsing.build_extra_params_mapping(CONNECTOR_TEST_STEP_ID)
)
@click.pass_context
async def test(
ctx: click.Context,
code_tests_only: bool,
fail_fast: bool,
concurrent_cat: bool,
skip_step: List[str],
extra_params: Dict[CONNECTOR_TEST_STEP_ID, STEP_PARAMS],
) -> bool:
"""Runs a test pipeline for the selected connectors.
Expand All @@ -76,8 +88,8 @@ async def test(
run_step_options = RunStepOptions(
fail_fast=fail_fast,
skip_steps=[CONNECTOR_TEST_STEP_ID(step_id) for step_id in skip_step],
step_params=extra_params,
)

connectors_tests_contexts = [
ConnectorContext(
pipeline_name=f"Testing connector {connector.technical_name}",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
#
"""This module groups factory like functions to dispatch tests steps according to the connector under test language."""

from __future__ import annotations

from typing import TYPE_CHECKING

import anyio
from connector_ops.utils import ConnectorLanguage # type: ignore
Expand All @@ -12,7 +15,11 @@
from pipelines.airbyte_ci.connectors.test.steps import java_connectors, python_connectors
from pipelines.airbyte_ci.connectors.test.steps.common import QaChecks, VersionFollowsSemverCheck, VersionIncrementCheck
from pipelines.airbyte_ci.metadata.pipeline import MetadataValidation
from pipelines.helpers.run_steps import STEP_TREE, StepToRun, run_steps
from pipelines.helpers.execution.run_steps import StepToRun, run_steps

if TYPE_CHECKING:

from pipelines.helpers.execution.run_steps import STEP_TREE

LANGUAGE_MAPPING = {
"get_test_steps": {
Expand All @@ -30,7 +37,7 @@ def get_test_steps(context: ConnectorContext) -> STEP_TREE:
context (ConnectorContext): The current connector context.
Returns:
List[StepResult]: The list of tests steps.
STEP_TREE: The list of tests steps.
"""
if _get_test_steps := LANGUAGE_MAPPING["get_test_steps"].get(context.connector.language):
return _get_test_steps(context)
Expand All @@ -43,23 +50,25 @@ async def run_connector_test_pipeline(context: ConnectorContext, semaphore: anyi
"""
Compute the steps to run for a connector test pipeline.
"""
all_steps_to_run: STEP_TREE = []

steps_to_run = get_test_steps(context)
all_steps_to_run += get_test_steps(context)

if not context.code_tests_only:
steps_to_run += [
static_analysis_steps_to_run = [
[
StepToRun(id=CONNECTOR_TEST_STEP_ID.METADATA_VALIDATION, step=MetadataValidation(context)),
StepToRun(id=CONNECTOR_TEST_STEP_ID.VERSION_FOLLOW_CHECK, step=VersionFollowsSemverCheck(context)),
StepToRun(id=CONNECTOR_TEST_STEP_ID.VERSION_INC_CHECK, step=VersionIncrementCheck(context)),
StepToRun(id=CONNECTOR_TEST_STEP_ID.QA_CHECKS, step=QaChecks(context)),
]
]
all_steps_to_run += static_analysis_steps_to_run

async with semaphore:
async with context:
result_dict = await run_steps(
runnables=steps_to_run,
runnables=all_steps_to_run,
options=context.run_step_options,
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from pipelines.dagger.actions import secrets
from pipelines.dagger.containers import internal_tools
from pipelines.helpers.utils import METADATA_FILE_NAME
from pipelines.models.steps import Step, StepResult, StepStatus
from pipelines.models.steps import STEP_PARAMS, Step, StepResult, StepStatus


class VersionCheck(Step, ABC):
Expand Down Expand Up @@ -193,21 +193,33 @@ class AcceptanceTests(Step):
CONTAINER_TEST_INPUT_DIRECTORY = "/test_input"
CONTAINER_SECRETS_DIRECTORY = "/test_input/secrets"
skipped_exit_code = 5
accept_extra_params = True

@property
def default_params(self) -> STEP_PARAMS:
"""Default pytest options.
Returns:
dict: The default pytest options.
"""
return super().default_params | {
"-ra": [], # Show extra test summary info in the report for all but the passed tests
"--disable-warnings": [], # Disable warnings in the pytest report
"--durations": ["3"], # Show the 3 slowest tests in the report
}

@property
def base_cat_command(self) -> List[str]:
command = [
"python",
"-m",
"pytest",
"--disable-warnings",
"--durations=3", # Show the 3 slowest tests in the report
"-ra", # Show extra test summary info in the report for all but the passed tests
"-p", # Load the connector_acceptance_test plugin
"connector_acceptance_test.plugin",
"--acceptance-test-config",
self.CONTAINER_TEST_INPUT_DIRECTORY,
]

if self.concurrent_test_run:
command += ["--numprocesses=auto"] # Using pytest-xdist to run tests in parallel, auto means using all available cores
return command
Expand All @@ -232,7 +244,7 @@ async def get_cat_command(self, connector_dir: Directory) -> List[str]:
if "integration_tests" in await connector_dir.entries():
if "acceptance.py" in await connector_dir.directory("integration_tests").entries():
cat_command += ["-p", "integration_tests.acceptance"]
return cat_command
return cat_command + self.params_as_cli_options

async def _run(self, connector_under_test_container: Container) -> StepResult:
"""Run the acceptance test suite on a connector dev image. Build the connector acceptance test image if the tag is :dev.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,24 +21,30 @@
from pipelines.airbyte_ci.steps.gradle import GradleTask
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.actions.system import docker
from pipelines.helpers.run_steps import StepToRun
from pipelines.helpers.execution.run_steps import StepToRun
from pipelines.helpers.utils import export_container_to_tarball
from pipelines.models.steps import StepResult, StepStatus
from pipelines.models.steps import STEP_PARAMS, StepResult, StepStatus

if TYPE_CHECKING:
from typing import Callable, Dict, List, Optional

from pipelines.helpers.run_steps import RESULTS_DICT, STEP_TREE
from pipelines.helpers.execution.run_steps import RESULTS_DICT, STEP_TREE


class IntegrationTests(GradleTask):
"""A step to run integrations tests for Java connectors using the integrationTestJava Gradle task."""

title = "Java Connector Integration Tests"
gradle_task_name = "integrationTestJava -x buildConnectorImage -x assemble"
gradle_task_name = "integrationTestJava"
mount_connector_secrets = True
bind_to_docker_host = True

@property
def default_params(self) -> STEP_PARAMS:
return super().default_params | {
"-x": ["buildConnectorImage", "assemble"], # Exclude the buildConnectorImage and assemble tasks
}

async def _load_normalization_image(self, normalization_tar_file: File) -> None:
normalization_image_tag = f"{self.context.connector.normalization_repository}:dev"
self.context.logger.info("Load the normalization image to the docker host.")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@
from pipelines.airbyte_ci.connectors.test.steps.common import AcceptanceTests, CheckBaseImageIsUsed
from pipelines.consts import LOCAL_BUILD_PLATFORM
from pipelines.dagger.actions import secrets
from pipelines.helpers.run_steps import STEP_TREE, StepToRun
from pipelines.models.steps import Step, StepResult
from pipelines.helpers.execution.run_steps import STEP_TREE, StepToRun
from pipelines.models.steps import STEP_PARAMS, Step, StepResult


class PytestStep(Step, ABC):
Expand All @@ -31,6 +31,18 @@ class PytestStep(Step, ABC):

skipped_exit_code = 5
bind_to_docker_host = False
accept_extra_params = True

@property
def default_params(self) -> STEP_PARAMS:
"""Default pytest options.
Returns:
dict: The default pytest options.
"""
return super().default_params | {
"-s": [], # Disable capturing stdout/stderr in pytest
}

@property
@abstractmethod
Expand All @@ -43,15 +55,6 @@ def extra_dependencies_names(self) -> Sequence[str]:
return ("dev",)
return ("dev", "tests")

@property
def additional_pytest_options(self) -> List[str]:
"""Theses options are added to the pytest command.
Returns:
List[str]: The additional pytest options.
"""
return []

async def _run(self, connector_under_test: Container) -> StepResult:
"""Run all pytest tests declared in the test directory of the connector code.
Expand Down Expand Up @@ -83,7 +86,7 @@ def get_pytest_command(self, test_config_file_name: str) -> List[str]:
Returns:
List[str]: The pytest command to run.
"""
cmd = ["pytest", "-s", self.test_directory_name, "-c", test_config_file_name] + self.additional_pytest_options
cmd = ["pytest", self.test_directory_name, "-c", test_config_file_name] + self.params_as_cli_options
if self.context.connector.is_using_poetry:
return ["poetry", "run"] + cmd
return cmd
Expand Down Expand Up @@ -174,18 +177,16 @@ class UnitTests(PytestStep):
MINIMUM_COVERAGE_FOR_CERTIFIED_CONNECTORS = 90

@property
def additional_pytest_options(self) -> List[str]:
def default_params(self) -> STEP_PARAMS:
"""Make sure the coverage computation is run for the unit tests.
Fail if the coverage is under 90% for certified connectors.
Returns:
List[str]: The additional pytest options to run coverage reports.
dict: The default pytest options.
"""
coverage_options = ["--cov", self.context.connector.technical_name.replace("-", "_")]
coverage_options = {"--cov": [self.context.connector.technical_name.replace("-", "_")]}
if self.context.connector.support_level == "certified":
coverage_options += ["--cov-fail-under", str(self.MINIMUM_COVERAGE_FOR_CERTIFIED_CONNECTORS)]

return super().additional_pytest_options + coverage_options
coverage_options["--cov-fail-under"] = [str(self.MINIMUM_COVERAGE_FOR_CERTIFIED_CONNECTORS)]
return super().default_params | coverage_options


class IntegrationTests(PytestStep):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH, INTERNAL_TOOL_PATHS
from pipelines.dagger.actions.python.common import with_pip_packages
from pipelines.dagger.containers.python import with_python_base
from pipelines.helpers.run_steps import STEP_TREE, StepToRun, run_steps
from pipelines.helpers.execution.run_steps import STEP_TREE, StepToRun, run_steps
from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable
from pipelines.models.reports import Report
from pipelines.models.steps import MountPath, Step, StepResult
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from pipelines.consts import AMAZONCORRETTO_IMAGE
from pipelines.dagger.actions import secrets
from pipelines.helpers.utils import sh_dash_c
from pipelines.models.steps import Step, StepResult
from pipelines.models.steps import STEP_PARAMS, Step, StepResult


class GradleTask(Step, ABC):
Expand All @@ -27,14 +27,23 @@ class GradleTask(Step, ABC):

context: ConnectorContext

DEFAULT_GRADLE_TASK_OPTIONS = ("--no-daemon", "--no-watch-fs", "--scan", "--build-cache", "--console=plain")
LOCAL_MAVEN_REPOSITORY_PATH = "/root/.m2"
GRADLE_DEP_CACHE_PATH = "/root/gradle-cache"
GRADLE_HOME_PATH = "/root/.gradle"

STATIC_GRADLE_TASK_OPTIONS = ("--no-daemon", "--no-watch-fs")
gradle_task_name: ClassVar[str]
bind_to_docker_host: ClassVar[bool] = False
mount_connector_secrets: ClassVar[bool] = False
accept_extra_params = True

@property
def default_params(self) -> STEP_PARAMS:
return super().default_params | {
"-Ds3BuildCachePrefix": [self.context.connector.technical_name], # Set the S3 build cache prefix.
"--build-cache": [], # Enable the gradle build cache.
"--scan": [], # Enable the gradle build scan.
"--console": ["plain"], # Disable the gradle rich console.
}

@property
def dependency_cache_volume(self) -> CacheVolume:
Expand All @@ -56,7 +65,7 @@ def build_include(self) -> List[str]:
]

def _get_gradle_command(self, task: str, *args: Any) -> str:
return f"./gradlew {' '.join(self.DEFAULT_GRADLE_TASK_OPTIONS + args)} {task}"
return f"./gradlew {' '.join(self.STATIC_GRADLE_TASK_OPTIONS + args)} {task}"

async def _run(self, *args: Any, **kwargs: Any) -> StepResult:
include = [
Expand Down Expand Up @@ -191,7 +200,7 @@ async def _run(self, *args: Any, **kwargs: Any) -> StepResult:
# Warm the gradle cache.
f"(rsync -a --stats --mkpath {self.GRADLE_DEP_CACHE_PATH}/ {self.GRADLE_HOME_PATH} || true)",
# Run the gradle task.
self._get_gradle_command(connector_task, f"-Ds3BuildCachePrefix={self.context.connector.technical_name}"),
self._get_gradle_command(connector_task, *self.params_as_cli_options),
]
)
)
Expand Down
Empty file.
Loading

0 comments on commit 8d27b6b

Please sign in to comment.