Skip to content

Commit

Permalink
Remove old code
Browse files Browse the repository at this point in the history
  • Loading branch information
bnchrch committed Nov 3, 2023
1 parent ab950d3 commit 258b66d
Show file tree
Hide file tree
Showing 7 changed files with 68 additions and 269 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,11 @@
from pipelines import main_logger
from pipelines.airbyte_ci.connectors.context import ConnectorContext
from pipelines.airbyte_ci.connectors.pipeline import run_connectors_pipelines
from pipelines.airbyte_ci.connectors.test.new_pipeline import new_run_connector_test_pipeline
from pipelines.airbyte_ci.connectors.test.pipeline import run_connector_test_pipeline
from pipelines.cli.dagger_pipeline_command import DaggerPipelineCommand
from pipelines.consts import ContextState
from pipelines.helpers.github import update_global_commit_status_check_for_tests
from pipelines.helpers.steps import RunStepOptions, run_steps
from pipelines.helpers.steps import RunStepOptions
from pipelines.helpers.utils import fail_if_missing_docker_hub_creds


Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -14,94 +14,71 @@
from pipelines.airbyte_ci.connectors.test.steps import java_connectors, python_connectors
from pipelines.airbyte_ci.connectors.test.steps.common import QaChecks, VersionFollowsSemverCheck, VersionIncrementCheck
from pipelines.airbyte_ci.metadata.pipeline import MetadataValidation
from pipelines.helpers.steps import StepToRun, new_run_steps
from pipelines.models.steps import StepResult

LANGUAGE_MAPPING = {
"run_all_tests": {
ConnectorLanguage.PYTHON: python_connectors.run_all_tests,
ConnectorLanguage.LOW_CODE: python_connectors.run_all_tests,
ConnectorLanguage.JAVA: java_connectors.run_all_tests,
"get_test_steps": {
ConnectorLanguage.PYTHON: python_connectors.get_test_steps,
ConnectorLanguage.LOW_CODE: python_connectors.get_test_steps,
ConnectorLanguage.JAVA: java_connectors.get_test_steps,
},
}


async def run_metadata_validation(context: ConnectorContext) -> List[StepResult]:
    """Validate the connector's metadata file.

    Args:
        context (ConnectorContext): The current connector context.

    Returns:
        List[StepResult]: A single-element list holding the metadata validation result.
    """
    validation_result = await MetadataValidation(context).run()
    return [validation_result]


async def run_version_checks(context: ConnectorContext) -> List[StepResult]:
    """Run the version checks on a connector.

    Both checks run sequentially: semver compliance first, then version increment.

    Args:
        context (ConnectorContext): The current connector context.

    Returns:
        List[StepResult]: The results of the version checks steps, in run order.
    """
    check_results: List[StepResult] = []
    for check_class in (VersionFollowsSemverCheck, VersionIncrementCheck):
        check_results.append(await check_class(context).run())
    return check_results


async def run_qa_checks(context: ConnectorContext) -> List[StepResult]:
    """Run the QA checks on a connector.

    Args:
        context (ConnectorContext): The current connector context.

    Returns:
        List[StepResult]: A single-element list holding the QA checks result.
    """
    qa_result = await QaChecks(context).run()
    return [qa_result]


async def run_all_tests(context: ConnectorContext) -> List[StepResult]:
"""Run all the tests steps according to the connector language.
def get_test_steps(context: ConnectorContext) -> List[StepToRun]:
"""Get all the tests steps according to the connector language.
Args:
context (ConnectorContext): The current connector context.
Returns:
List[StepResult]: The results of the tests steps.
List[StepResult]: The list of tests steps.
"""
if _run_all_tests := LANGUAGE_MAPPING["run_all_tests"].get(context.connector.language):
return await _run_all_tests(context)
if _get_test_steps := LANGUAGE_MAPPING["get_test_steps"].get(context.connector.language):
return _get_test_steps(context)
else:
context.logger.warning(f"No tests defined for connector language {context.connector.language}!")
return []


async def run_connector_test_pipeline(context: ConnectorContext, semaphore: anyio.Semaphore) -> ConnectorReport:
"""Run a test pipeline for a single connector.
async def run_connector_test_pipeline(context: ConnectorContext, semaphore: anyio.Semaphore):
"""
Compute the steps to run for a connector test pipeline.
"""

"""
TODO: main
- Update java connectors
- update metadata_service run_steps use
- clean up todos
- replace old functions with new functions
- fail fast?
NEXT PR
- add extra_args_support
"""

A visual DAG can be found on the README.md file of the pipelines modules.
steps_to_run = get_test_steps(context)

Args:
context (ConnectorContext): The initialized connector context.
if not context.code_tests_only:
steps_to_run += [
[
StepToRun(id="metadata_validation", step=MetadataValidation(context)),
StepToRun(id="version_follow_check", step=VersionFollowsSemverCheck(context)),
StepToRun(id="version_inc_check", step=VersionIncrementCheck(context)),
StepToRun(id="qa_checks", step=QaChecks(context)),
]
]

Returns:
ConnectorReport: The test reports holding tests results.
"""
async with semaphore:
async with context:
result_dict = await new_run_steps(
runnables=steps_to_run,
options=context.run_step_options,
)

async with asyncer.create_task_group() as task_group:
tasks = [
task_group.soonify(run_all_tests)(context),
]
if not context.code_tests_only:
tasks += [
task_group.soonify(run_metadata_validation)(context),
task_group.soonify(run_version_checks)(context),
task_group.soonify(run_qa_checks)(context),
]
results = list(itertools.chain(*(task.value for task in tasks)))
results = list(result_dict.values())
context.report = ConnectorReport(context, steps_results=results, name="TEST RESULTS")

return context.report

Original file line number Diff line number Diff line change
Expand Up @@ -136,64 +136,3 @@ def get_test_steps(context: ConnectorContext) -> List[StepToRun]:
StepToRun(id="unit", step=UnitTests(context)),
_get_acceptance_test_steps(context),
]


async def run_all_tests(context: ConnectorContext) -> List[StepResult]:
    """Run all tests for a Java connector.

    - Run unit tests with Gradle.
    - Build the connector distribution tar, then the connector image from it.
    - Build the normalization image if the connector supports it.
    - Run integration and acceptance tests in parallel using the built connector
      and normalization images.

    Args:
        context (ConnectorContext): The current connector context.

    Returns:
        List[StepResult]: The results of all the tests steps.
    """
    step_results = []

    # The distribution tar is a prerequisite for the image build: fail fast and
    # return early if it cannot be produced.
    build_distribution_tar_result = await BuildConnectorDistributionTar(context).run()
    step_results.append(build_distribution_tar_result)
    if build_distribution_tar_result.status is StepStatus.FAILURE:
        return step_results

    dist_tar_dir = build_distribution_tar_result.output_artifact.directory(dist_tar_directory_path(context))

    async def run_docker_build_dependent_steps(dist_tar_dir: Directory) -> List[StepResult]:
        # Sub-pipeline for every step that needs the built connector image.
        # Note: shadows the outer `step_results` on purpose — its results are
        # merged into the final list by the caller below.
        step_results = []
        build_connector_image_results = await BuildConnectorImages(context, LOCAL_BUILD_PLATFORM).run(dist_tar_dir)
        step_results.append(build_connector_image_results)
        if build_connector_image_results.status is StepStatus.FAILURE:
            # No image means integration/acceptance tests cannot run.
            return step_results

        if context.connector.supports_normalization:
            normalization_image = f"{context.connector.normalization_repository}:dev"
            context.logger.info(f"This connector supports normalization: will build {normalization_image}.")
            build_normalization_results = await BuildOrPullNormalization(context, normalization_image, LOCAL_BUILD_PLATFORM).run()
            normalization_container = build_normalization_results.output_artifact
            # Export the normalization image to a tarball so the test steps can load it.
            normalization_tar_file, _ = await export_container_to_tarball(
                context, normalization_container, tar_file_name=f"{context.connector.normalization_repository}_{context.git_revision}.tar"
            )
            step_results.append(build_normalization_results)
        else:
            normalization_tar_file = None

        connector_container = build_connector_image_results.output_artifact[LOCAL_BUILD_PLATFORM]
        connector_image_tar_file, _ = await export_container_to_tarball(context, connector_container)

        # Integration and acceptance tests only depend on the built images, so
        # they can run concurrently with each other.
        async with asyncer.create_task_group() as docker_build_dependent_group:
            soon_integration_tests_results = docker_build_dependent_group.soonify(IntegrationTests(context).run)(
                connector_tar_file=connector_image_tar_file, normalization_tar_file=normalization_tar_file
            )
            soon_cat_results = docker_build_dependent_group.soonify(AcceptanceTests(context, True).run)(connector_container)

        # .value is only safe after the task group has exited (all tasks done).
        step_results += [soon_cat_results.value, soon_integration_tests_results.value]
        return step_results

    # Unit tests are independent of the docker images, so they run concurrently
    # with the whole docker-build-dependent sub-pipeline.
    async with asyncer.create_task_group() as test_task_group:
        soon_unit_tests_result = test_task_group.soonify(UnitTests(context).run)()
        soon_docker_build_dependent_steps_results = test_task_group.soonify(run_docker_build_dependent_steps)(dist_tar_dir)

    return step_results + [soon_unit_tests_result.value] + soon_docker_build_dependent_steps_results.value
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from pipelines.consts import DOCS_DIRECTORY_ROOT_PATH, INTERNAL_TOOL_PATHS
from pipelines.dagger.actions.python.common import with_pip_packages
from pipelines.dagger.containers.python import with_python_base
from pipelines.helpers.steps import run_steps
from pipelines.helpers.steps import StepToRun, run_steps
from pipelines.helpers.utils import DAGGER_CONFIG, get_secret_host_variable
from pipelines.models.reports import Report
from pipelines.models.steps import MountPath, Step, StepResult
Expand Down Expand Up @@ -169,7 +169,17 @@ async def run_metadata_orchestrator_deploy_pipeline(
metadata_pipeline_context.dagger_client = dagger_client.pipeline(metadata_pipeline_context.pipeline_name)

async with metadata_pipeline_context:
steps = [TestOrchestrator(context=metadata_pipeline_context), DeployOrchestrator(context=metadata_pipeline_context)]
steps = [
StepToRun(
id="test_orchestrator",
step=TestOrchestrator(context=metadata_pipeline_context),
),
StepToRun(
id="deploy_orchestrator",
step=DeployOrchestrator(context=metadata_pipeline_context),
depends_on=["test_orchestrator"],
),
]
steps_results = await run_steps(steps)
metadata_pipeline_context.report = Report(
pipeline_context=metadata_pipeline_context, steps_results=steps_results, name="METADATA ORCHESTRATOR DEPLOY RESULTS"
Expand Down
Loading

0 comments on commit 258b66d

Please sign in to comment.