Skip to content

Commit

Permalink
Merge branch 'master' into vitalii/1392_test_file_formats
Browse files Browse the repository at this point in the history
# Conflicts:
#	airbyte-integrations/connectors/source-file/build.gradle
#	airbyte-integrations/connectors/source-file/integration_tests/integration_source_test.py
#	airbyte-integrations/connectors/source-file/setup.py
#	docs/integrations/sources/file.md
  • Loading branch information
vitaliizazmic committed Jan 28, 2021
2 parents 099bd6b + 099c22e commit e8ea7a1
Show file tree
Hide file tree
Showing 46 changed files with 4,425 additions and 820 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/test-command.yml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ jobs:
GH_INTEGRATION_TEST_CREDS: ${{ secrets.GH_INTEGRATION_TEST_CREDS }}
GOOGLE_ANALYTICS_TEST_CREDS: ${{ secrets.GOOGLE_ANALYTICS_TEST_CREDS }}
GOOGLE_ANALYTICS_TEST_TRACKING_ID: ${{ secrets.GOOGLE_ANALYTICS_TEST_TRACKING_ID }}
GOOGLE_CLOUD_STORAGE_TEST_CREDS: ${{ secrets.GOOGLE_CLOUD_STORAGE_TEST_CREDS }}
GREENHOUSE_TEST_CREDS: ${{ secrets.GREENHOUSE_TEST_CREDS }}
GSHEETS_INTEGRATION_TESTS_CREDS: ${{ secrets.GSHEETS_INTEGRATION_TESTS_CREDS }}
HUBSPOT_INTEGRATION_TESTS_CREDS: ${{ secrets.HUBSPOT_INTEGRATION_TESTS_CREDS }}
Expand All @@ -73,6 +74,7 @@ jobs:
SOURCE_MARKETO_SINGER_INTEGRATION_TEST_CONFIG: ${{ secrets.SOURCE_MARKETO_SINGER_INTEGRATION_TEST_CONFIG }}
SOURCE_RECURLY_INTEGRATION_TEST_CREDS: ${{ secrets.SOURCE_RECURLY_INTEGRATION_TEST_CREDS }}
STRIPE_INTEGRATION_TEST_CREDS: ${{ secrets.STRIPE_INTEGRATION_TEST_CREDS }}
TEMPO_INTEGRATION_TEST_CREDS: ${{ secrets.TEMPO_INTEGRATION_TEST_CREDS }}
TWILIO_TEST_CREDS: ${{ secrets.TWILIO_TEST_CREDS }}
ZENDESK_SECRETS_CREDS: ${{ secrets.ZENDESK_SECRETS_CREDS }}
ZOOM_INTEGRATION_TEST_CREDS: ${{ secrets.ZOOM_INTEGRATION_TEST_CREDS }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
"sourceDefinitionId": "00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c",
"name": "Looker",
"dockerRepository": "airbyte/source-looker",
"dockerImageTag": "0.1.0",
"dockerImageTag": "0.1.1",
"documentationUrl": "https://hub.docker.com/r/airbyte/source-looker"
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
"sourceDefinitionId": "778daa7c-feaf-4db6-96f3-70fd645acc77",
"name": "File",
"dockerRepository": "airbyte/source-file",
"dockerImageTag": "0.1.7",
"dockerImageTag": "0.1.8",
"documentationUrl": "https://hub.docker.com/r/airbyte/source-file"
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
"sourceDefinitionId": "b1892b11-788d-44bd-b9ec-3a436f7b54ce",
"name": "Shopify",
"dockerRepository": "airbyte/source-shopify-singer",
"dockerImageTag": "0.1.6",
"dockerImageTag": "0.1.7",
"documentationUrl": "https://hub.docker.com/r/airbyte/source-shopify-singer"
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
- sourceDefinitionId: 778daa7c-feaf-4db6-96f3-70fd645acc77
name: File
dockerRepository: airbyte/source-file
dockerImageTag: 0.1.7
dockerImageTag: 0.1.8
documentationUrl: https://hub.docker.com/r/airbyte/source-file
- sourceDefinitionId: fdc8b827-3257-4b33-83cc-106d234c34d4
name: Google Adwords
Expand Down Expand Up @@ -86,7 +86,7 @@
- sourceDefinitionId: b1892b11-788d-44bd-b9ec-3a436f7b54ce
name: Shopify
dockerRepository: airbyte/source-shopify-singer
dockerImageTag: 0.1.6
dockerImageTag: 0.1.7
documentationUrl: https://hub.docker.com/r/airbyte/source-shopify-singer
- sourceDefinitionId: 9845d17a-45f1-4070-8a60-50914b1c8e2b
name: HTTP Request
Expand Down Expand Up @@ -161,7 +161,7 @@
- sourceDefinitionId: 00405b19-9768-4e0c-b1ae-9fc2ee2b2a8c
name: Looker
dockerRepository: airbyte/source-looker
dockerImageTag: 0.1.0
dockerImageTag: 0.1.1
documentationUrl: https://hub.docker.com/r/airbyte/source-looker
- sourceDefinitionId: ed799e2b-2158-4c66-8da4-b40fe63bc72a
name: Plaid
Expand Down
2 changes: 1 addition & 1 deletion airbyte-integrations/connectors/source-file/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,5 +11,5 @@ COPY $CODE_PATH ./$CODE_PATH
COPY setup.py ./
RUN pip install ".[main]"

LABEL io.airbyte.version=0.1.7
LABEL io.airbyte.version=0.1.8
LABEL io.airbyte.name=airbyte/source-file
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,6 @@ COPY $CODE_PATH $CODE_PATH
COPY source_file/*.json $CODE_PATH
COPY setup.py ./

RUN pip install ".[integration_tests]"
RUN pip install ".[tests]"

WORKDIR /airbyte
4 changes: 3 additions & 1 deletion airbyte-integrations/connectors/source-file/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,13 @@ airbytePython {
moduleDirectory 'source_file'
}

task("customIntegrationTesPython", type: PythonTask, dependsOn: installTestReqs){
task("customIntegrationTestPython", type: PythonTask, dependsOn: installTestReqs){
module = "pytest"
command = "-s integration_tests"
}

integrationTest.dependsOn("customIntegrationTestPython")

dependencies {
implementation files(project(':airbyte-integrations:bases:base-python').airbyteDocker.outputs)
}
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
SOFTWARE.
"""

from .integration_source_test import TestSourceFile
from .standard_source_test import SourceFileStandardTest

__all__ = ["SourceFileStandardTest", "TestSourceFile"]
__all__ = ["SourceFileStandardTest"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,145 @@
"""
MIT License
Copyright (c) 2020 Airbyte
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""

import json
from pathlib import Path

import pytest
from source_file.client import Client

# Directory containing this test module; intended for locating local fixture files.
# NOTE(review): HERE appears unused within this file — confirm against the full
# module/fixtures before removing.
HERE = Path(__file__).parent.absolute()


def check_read(config, expected_columns=10, expected_rows=42):
    """Build a Client from *config*, read every record, and verify the counts.

    Asserts that the number of records equals *expected_rows* and that the
    first record has *expected_columns* fields.
    """
    records = list(Client(**config).read())
    assert len(records) == expected_rows
    assert len(records[0]) == expected_columns


@pytest.mark.parametrize(
    "provider_name,file_path,file_format",
    [
        ("ssh", "files/test.csv", "csv"),
        ("scp", "files/test.csv", "csv"),
        ("sftp", "files/test.csv", "csv"),
        ("ssh", "files/test.csv.gz", "csv"),  # text in binary
        ("ssh", "files/test.pkl", "pickle"),  # binary
        ("sftp", "files/test.pkl.gz", "pickle"),  # binary in binary
    ],
)
def test__read_from_private_ssh(provider_config, provider_name, file_path, file_format):
    """The first record fetched over an SSH-family provider equals the known fixture row."""
    ssh_client = Client(
        dataset_name="output",
        format=file_format,
        url=file_path,
        provider=provider_config(provider_name),
    )
    first_record = next(ssh_client.read())
    assert first_record == {"header1": "text", "header2": 1, "header3": 0.2}


@pytest.mark.parametrize(
    "provider_name,file_path,file_format",
    [
        ("ssh", "files/file_does_not_exist.csv", "csv"),
        ("gcs", "gs://gcp-public-data-landsat/file_does_not_exist.csv", "csv"),
    ],
)
def test__read_file_not_found(provider_config, provider_name, file_path, file_format):
    """Reading a nonexistent remote file surfaces FileNotFoundError to the caller."""
    missing_file_client = Client(
        dataset_name="output",
        format=file_format,
        url=file_path,
        provider=provider_config(provider_name),
    )
    with pytest.raises(FileNotFoundError):
        next(missing_file_client.read())


@pytest.mark.parametrize(
    "provider_name, file_path, file_format",
    [
        ("ssh", "files/test.csv", "csv"),
        ("ssh", "files/test.pkl", "pickle"),
        ("sftp", "files/test.pkl.gz", "pickle"),
    ],
)
def test__streams_from_ssh_providers(provider_config, provider_name, file_path, file_format):
    """Stream discovery over SSH providers yields one stream with the fixture schema."""
    discovered = list(
        Client(
            dataset_name="output",
            format=file_format,
            url=file_path,
            provider=provider_config(provider_name),
        ).streams
    )
    assert len(discovered) == 1
    expected_properties = {
        "header1": {"type": "string"},
        "header2": {"type": "number"},
        "header3": {"type": "number"},
    }
    assert discovered[0].json_schema["properties"] == expected_properties


@pytest.mark.parametrize(
    "storage_provider, url, columns_nb, separator, has_header",
    [
        # epidemiology csv
        ("HTTPS", "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv", 10, ",", True),
        ("HTTPS", "storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv", 10, ",", True),
        ("local", "injected by tests", 10, ",", True),
        # landsat compressed csv
        ("GCS", "gs://gcp-public-data-landsat/index.csv.gz", 18, ",", True),
        # GDELT csv
        ("S3", "s3://gdelt-open-data/events/20190914.export.csv", 58, "\\t", False),
    ],
)
def test__read_from_public_provider(download_gcs_public_data, storage_provider, url, columns_nb, separator, has_header):
    """Each public storage provider can read its sample CSV with the expected column count.

    The ``local`` case reads the temp file downloaded by the
    ``download_gcs_public_data`` fixture instead of a remote URL.
    ``has_header`` is carried in the parametrization but not used by the body
    (the reader options are built without it).
    """
    # Duplicate GCS/S3 parametrize rows were removed: identical tuples made
    # pytest run the exact same (slow, network-bound) case twice.
    # inject temp file path that was downloaded by the test as URL
    url = download_gcs_public_data if storage_provider == "local" else url
    config = {
        "format": "csv",
        "dataset_name": "output",
        "reader_options": json.dumps({"sep": separator, "nrows": 42}),
        "provider": {"storage": storage_provider},
        "url": url,
    }

    check_read(config, expected_columns=columns_nb)


def test__read_from_private_gcs(google_cloud_service_credentials, private_google_cloud_file):
    """A private GCS object is readable when service-account credentials are supplied."""
    gcs_provider = {
        "storage": "GCS",
        "service_account_json": json.dumps(google_cloud_service_credentials),
    }
    check_read(
        {
            "dataset_name": "output",
            "format": "csv",
            "url": private_google_cloud_file,
            "reader_options": json.dumps({"sep": ",", "nrows": 42}),
            "provider": gcs_provider,
        }
    )


def test__read_from_private_aws(aws_credentials, private_aws_file):
    """A private S3 object is readable when AWS key-pair credentials are supplied."""
    s3_provider = {
        "storage": "S3",
        "aws_access_key_id": aws_credentials["aws_access_key_id"],
        "aws_secret_access_key": aws_credentials["aws_secret_access_key"],
    }
    check_read(
        {
            "dataset_name": "output",
            "format": "csv",
            "url": private_aws_file,
            "reader_options": json.dumps({"sep": ",", "nrows": 42}),
            "provider": s3_provider,
        }
    )
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"filename": "integrationTestFile",
"dataset_name": "integrationTestFile",
"format": "csv",
"reader_options": "{\"sep\": \",\", \"nrows\": 20}",
"url": "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv",
Expand Down
Loading

0 comments on commit e8ea7a1

Please sign in to comment.