🐛 Source Amazon Seller Partner: Fix check for Vendor accounts (#35331)
tolik0 committed Feb 19, 2024
1 parent 088b9b7 commit f2efd27
Showing 11 changed files with 171 additions and 174 deletions.
@@ -15,7 +15,7 @@ data:
connectorSubtype: api
connectorType: source
definitionId: e55879a8-0ef8-4557-abcf-ab34c53ec460
dockerImageTag: 3.4.0
dockerImageTag: 3.5.0
dockerRepository: airbyte/source-amazon-seller-partner
documentationUrl: https://docs.airbyte.com/integrations/sources/amazon-seller-partner
githubIssueLabel: source-amazon-seller-partner
@@ -3,6 +3,7 @@
#


import traceback
from os import getenv
from typing import Any, List, Mapping, Optional, Tuple

@@ -115,10 +116,11 @@ def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) ->

if config.get("account_type", "Seller") == "Seller":
stream_to_check = Orders(**stream_kwargs)
next(stream_to_check.read_records(sync_mode=SyncMode.full_refresh))
else:
stream_to_check = VendorSalesReports(**stream_kwargs)

next(stream_to_check.read_records(sync_mode=SyncMode.full_refresh))
stream_to_check = VendorOrders(**stream_kwargs)
stream_slices = list(stream_to_check.stream_slices(sync_mode=SyncMode.full_refresh))
next(stream_to_check.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slices[0]))

return True, None
except Exception as e:
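For Vendor accounts, the connection check above no longer reads VendorSalesReports without a slice; it now probes VendorOrders against the first full-refresh stream slice. A minimal sketch of how the updated branch reads once this hunk is applied, assuming Orders, VendorOrders, SyncMode, and stream_kwargs are defined earlier in source.py as in the surrounding code:

    if config.get("account_type", "Seller") == "Seller":
        # Seller accounts are still validated by reading a single Orders record.
        stream_to_check = Orders(**stream_kwargs)
        next(stream_to_check.read_records(sync_mode=SyncMode.full_refresh))
    else:
        # Vendor accounts: VendorOrders is read per stream slice, so build the
        # full-refresh slices first and probe the earliest one.
        stream_to_check = VendorOrders(**stream_kwargs)
        stream_slices = list(stream_to_check.stream_slices(sync_mode=SyncMode.full_refresh))
        next(stream_to_check.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slices[0]))

    return True, None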
@@ -6,6 +6,7 @@
import csv
import gzip
import json
import logging
import os
import time
from abc import ABC, abstractmethod
@@ -282,6 +283,15 @@ def _retrieve_report(self, report_id: str) -> Mapping[str, Any]:

return report_payload

def _retrieve_report_result(self, report_document_id: str) -> requests.Response:
request_headers = self.request_headers()
request = self._create_prepared_request(
path=self.path(document_id=report_document_id),
headers=dict(request_headers, **self.authenticator.get_auth_header()),
params=self.request_params(),
)
return self._send_request(request, {})

@default_backoff_handler(factor=5, max_tries=5)
def download_and_decompress_report_document(self, payload: dict) -> str:
"""
@@ -381,23 +391,29 @@ def read_records(
if processing_status == ReportProcessingStatus.DONE:
# retrieve and decrypt the report document
document_id = report_payload["reportDocumentId"]
request_headers = self.request_headers()
request = self._create_prepared_request(
path=self.path(document_id=document_id),
headers=dict(request_headers, **self.authenticator.get_auth_header()),
params=self.request_params(),
)
response = self._send_request(request, {})
response = self._retrieve_report_result(document_id)

for record in self.parse_response(response, stream_state, stream_slice):
if report_end_date:
record["dataEndTime"] = report_end_date.strftime(DATE_FORMAT)
yield record
elif processing_status == ReportProcessingStatus.FATAL:
# retrieve and decrypt the report document
try:
document_id = report_payload["reportDocumentId"]
response = self._retrieve_report_result(document_id)

document = self.download_and_decompress_report_document(response.json())
error_response = json.loads(document)
except Exception as e:
logging.error(f"Failed to retrieve the report result document for stream '{self.name}'. Exception: {e}")
error_response = "Failed to retrieve the report result document."

raise AirbyteTracedException(
internal_message=(
f"Failed to retrieve the report '{self.name}' for period "
f"{stream_slice['dataStartTime']}-{stream_slice['dataEndTime']} "
"due to Amazon Seller Partner platform issues. This will be read during the next sync."
f"{stream_slice['dataStartTime']}-{stream_slice['dataEndTime']}. "
f"This will be read during the next sync. Error: {error_response}"
)
)
elif processing_status == ReportProcessingStatus.CANCELLED:
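The hunks above extract the report-document download request into a _retrieve_report_result helper and reuse it when a report finishes with a FATAL processing status, so the error payload returned by Amazon is surfaced in the raised exception instead of a generic platform-issue message. A minimal sketch of the resulting FATAL branch, assuming the surrounding read_records context in streams.py (report_payload, stream_slice, and the imports shown above):

    elif processing_status == ReportProcessingStatus.FATAL:
        # Try to fetch and decode the report result document so the
        # Amazon-provided error details can be included in the exception.
        try:
            document_id = report_payload["reportDocumentId"]
            response = self._retrieve_report_result(document_id)
            document = self.download_and_decompress_report_document(response.json())
            error_response = json.loads(document)
        except Exception as e:
            logging.error(f"Failed to retrieve the report result document for stream '{self.name}'. Exception: {e}")
            error_response = "Failed to retrieve the report result document."

        raise AirbyteTracedException(
            internal_message=(
                f"Failed to retrieve the report '{self.name}' for period "
                f"{stream_slice['dataStartTime']}-{stream_slice['dataEndTime']}. "
                f"This will be read during the next sync. Error: {error_response}"
            )
        )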
@@ -14,17 +14,13 @@


class RequestBuilder:

@classmethod
def auth_endpoint(cls) -> RequestBuilder:
request_headers = {"Content-Type": "application/x-www-form-urlencoded"}
request_body = (
f"grant_type=refresh_token&client_id={LWA_APP_ID}&"
f"client_secret={LWA_CLIENT_SECRET}&refresh_token={REFRESH_TOKEN}"
)
return cls("auth/o2/token").with_base_url("https://api.amazon.com").with_headers(request_headers).with_body(
request_body
f"grant_type=refresh_token&client_id={LWA_APP_ID}&" f"client_secret={LWA_CLIENT_SECRET}&refresh_token={REFRESH_TOKEN}"
)
return cls("auth/o2/token").with_base_url("https://api.amazon.com").with_headers(request_headers).with_body(request_body)

@classmethod
def create_report_endpoint(cls, report_name: str) -> RequestBuilder:
@@ -118,11 +118,11 @@ def _check_report_status_response(
"dataEndTime": CONFIG_END_DATE,
"createdTime": CONFIG_START_DATE,
"dataStartTime": CONFIG_START_DATE,
"reportDocumentId": report_document_id,
}
if processing_status == ReportProcessingStatus.DONE:
response_body.update(
{
"reportDocumentId": report_document_id,
"processingEndTime": CONFIG_START_DATE,
"processingStartTime": CONFIG_START_DATE,
}
@@ -141,18 +141,22 @@ def _get_document_download_url_response(
return build_response(response_body, status_code=HTTPStatus.OK)


def _download_document_response(
stream_name: str, data_format: Optional[str] = "csv", compressed: Optional[bool] = False
) -> HttpResponse:
def _download_document_response(stream_name: str, data_format: Optional[str] = "csv", compressed: Optional[bool] = False) -> HttpResponse:
response_body = find_template(stream_name, __file__, data_format)
if compressed:
response_body = gzip.compress(response_body.encode("iso-8859-1"))
return HttpResponse(body=response_body, status_code=HTTPStatus.OK)


def _download_document_error_response(compressed: Optional[bool] = False) -> HttpResponse:
response_body = '{"errorDetails":"Error in report request: This report type requires the reportPeriod, distributorView, sellingProgram reportOption to be specified. Please review the document for this report type on GitHub, provide a value for this reportOption in your request, and try again."}'
if compressed:
response_body = gzip.compress(response_body.encode("iso-8859-1"))
return HttpResponse(body=response_body, status_code=HTTPStatus.OK)


@freezegun.freeze_time(NOW.isoformat())
class TestFullRefresh:

@staticmethod
def _read(stream_name: str, config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput:
return read_output(
@@ -164,9 +168,7 @@ def _read(stream_name: str, config_: ConfigBuilder, expecting_exception: bool =

@pytest.mark.parametrize(("stream_name", "data_format"), STREAMS)
@HttpMocker()
def test_given_report_when_read_then_return_records(
self, stream_name: str, data_format: str, http_mocker: HttpMocker
) -> None:
def test_given_report_when_read_then_return_records(self, stream_name: str, data_format: str, http_mocker: HttpMocker) -> None:
mock_auth(http_mocker)

http_mocker.post(_create_report_request(stream_name).build(), _create_report_response(_REPORT_ID))
@@ -329,9 +331,7 @@ def test_given_report_access_forbidden_when_read_then_no_records_and_error_logge
) -> None:
mock_auth(http_mocker)

http_mocker.post(
_create_report_request(stream_name).build(), response_with_status(status_code=HTTPStatus.FORBIDDEN)
)
http_mocker.post(_create_report_request(stream_name).build(), response_with_status(status_code=HTTPStatus.FORBIDDEN))

output = self._read(stream_name, config())
message_on_access_forbidden = (
@@ -354,9 +354,7 @@ def test_given_report_status_cancelled_when_read_then_stream_completed_successfu
_check_report_status_response(stream_name, processing_status=ReportProcessingStatus.CANCELLED),
)

message_on_report_cancelled = (
f"The report for stream '{stream_name}' was cancelled or there is no data to return."
)
message_on_report_cancelled = f"The report for stream '{stream_name}' was cancelled or there is no data to return."

output = self._read(stream_name, config())
assert_message_in_log_output(message_on_report_cancelled, output)
@@ -372,14 +370,27 @@ def test_given_report_status_fatal_when_read_then_exception_raised(
http_mocker.post(_create_report_request(stream_name).build(), _create_report_response(_REPORT_ID))
http_mocker.get(
_check_report_status_request(_REPORT_ID).build(),
_check_report_status_response(stream_name, processing_status=ReportProcessingStatus.FATAL),
_check_report_status_response(
stream_name, processing_status=ReportProcessingStatus.FATAL, report_document_id=_REPORT_DOCUMENT_ID
),
)

http_mocker.get(
_get_document_download_url_request(_REPORT_DOCUMENT_ID).build(),
_get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID),
)
http_mocker.get(
_download_document_request(_DOCUMENT_DOWNLOAD_URL).build(),
[
response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR),
_download_document_error_response(),
],
)

output = self._read(stream_name, config(), expecting_exception=True)
assert output.errors[-1].trace.error.failure_type == FailureType.config_error
assert (
f"Failed to retrieve the report '{stream_name}' for period {CONFIG_START_DATE}-{CONFIG_END_DATE} "
"due to Amazon Seller Partner platform issues. This will be read during the next sync."
f"Failed to retrieve the report '{stream_name}' for period {CONFIG_START_DATE}-{CONFIG_END_DATE}. This will be read during the next sync. Error: {{'errorDetails': 'Error in report request: This report type requires the reportPeriod, distributorView, sellingProgram reportOption to be specified. Please review the document for this report type on GitHub, provide a value for this reportOption in your request, and try again.'}}"
) in output.errors[-1].trace.error.message

@pytest.mark.parametrize(
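Note on the FATAL-status test above: the download URL is mocked with a list of two responses, so the first download attempt returns HTTP 500 and is retried by the default_backoff_handler wrapping download_and_decompress_report_document, while the second attempt returns _download_document_error_response(). The errorDetails string from that second response is what the updated assertion expects to find in the traced exception message.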
@@ -405,9 +416,7 @@ def test_given_report_with_incorrect_date_format_when_read_then_formatted(
_get_document_download_url_request(_REPORT_DOCUMENT_ID).build(),
_get_document_download_url_response(_DOCUMENT_DOWNLOAD_URL, _REPORT_DOCUMENT_ID),
)
http_mocker.get(
_download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), _download_document_response(stream_name)
)
http_mocker.get(_download_document_request(_DOCUMENT_DOWNLOAD_URL).build(), _download_document_response(stream_name))

output = self._read(stream_name, config())
assert len(output.records) == DEFAULT_EXPECTED_NUMBER_OF_RECORDS
@@ -425,9 +434,7 @@ def test_given_http_error_500_on_create_report_when_read_then_no_records_and_err
response_with_status(status_code=HTTPStatus.INTERNAL_SERVER_ERROR),
)

message_on_backoff_exception = (
f"The report for stream '{stream_name}' was cancelled due to several failed retry attempts."
)
message_on_backoff_exception = f"The report for stream '{stream_name}' was cancelled due to several failed retry attempts."

output = self._read(stream_name, config())
assert_message_in_log_output(message_on_backoff_exception, output)
@@ -63,7 +63,6 @@ def _shipping_label_record() -> RecordBuilder:

@freezegun.freeze_time(NOW.isoformat())
class TestFullRefresh:

@staticmethod
def _read(config_: ConfigBuilder, expecting_exception: bool = False) -> EntrypointOutput:
return read_output(
@@ -89,9 +88,7 @@ def test_given_two_pages_when_read_then_return_records(self, http_mocker: HttpMo
mock_auth(http_mocker)
http_mocker.get(
_vendor_direct_fulfillment_shipping_request().build(),
_vendor_direct_fulfillment_shipping_response().with_pagination().with_record(
_shipping_label_record()
).build(),
_vendor_direct_fulfillment_shipping_response().with_pagination().with_record(_shipping_label_record()).build(),
)
query_params_with_next_page_token = {
_REPLICATION_START_FIELD: _START_DATE.strftime(TIME_FORMAT),
@@ -100,9 +97,10 @@ def test_given_two_pages_when_read_then_return_records(self, http_mocker: HttpMo
}
http_mocker.get(
_vendor_direct_fulfillment_shipping_request().with_query_params(query_params_with_next_page_token).build(),
_vendor_direct_fulfillment_shipping_response().with_record(_shipping_label_record()).with_record(
_shipping_label_record()
).build(),
_vendor_direct_fulfillment_shipping_response()
.with_record(_shipping_label_record())
.with_record(_shipping_label_record())
.build(),
)

output = self._read(config().with_start_date(_START_DATE).with_end_date(_END_DATE))
@@ -135,9 +133,7 @@ def test_given_two_slices_when_read_then_return_records(self, http_mocker: HttpM
assert len(output.records) == 2

@HttpMocker()
def test_given_http_status_500_then_200_when_read_then_retry_and_return_records(
self, http_mocker: HttpMocker
) -> None:
def test_given_http_status_500_then_200_when_read_then_retry_and_return_records(self, http_mocker: HttpMocker) -> None:
mock_auth(http_mocker)
http_mocker.get(
_vendor_direct_fulfillment_shipping_request().build(),
@@ -151,9 +147,7 @@ def test_given_http_status_500_then_200_when_read_then_retry_and_return_records(
assert len(output.records) == 1

@HttpMocker()
def test_given_http_status_500_on_availability_when_read_then_raise_system_error(
self, http_mocker: HttpMocker
) -> None:
def test_given_http_status_500_on_availability_when_read_then_raise_system_error(self, http_mocker: HttpMocker) -> None:
mock_auth(http_mocker)
http_mocker.get(
_vendor_direct_fulfillment_shipping_request().build(),
@@ -166,7 +160,6 @@ def test_given_http_status_500_on_availability_when_read_then_raise_system_error

@freezegun.freeze_time(NOW.isoformat())
class TestIncremental:

@staticmethod
def _read(
config_: ConfigBuilder, state: Optional[List[AirbyteStateMessage]] = None, expecting_exception: bool = False
@@ -196,9 +189,10 @@ def test_when_read_then_state_message_produced_and_state_match_latest_record(sel
mock_auth(http_mocker)
http_mocker.get(
_vendor_direct_fulfillment_shipping_request().build(),
_vendor_direct_fulfillment_shipping_response().with_record(_shipping_label_record()).with_record(
_shipping_label_record()
).build(),
_vendor_direct_fulfillment_shipping_response()
.with_record(_shipping_label_record())
.with_record(_shipping_label_record())
.build(),
)

output = self._read(config().with_start_date(_START_DATE).with_end_date(_END_DATE))
@@ -217,21 +211,21 @@ def test_given_state_when_read_then_state_value_is_created_after_query_param(sel
_REPLICATION_START_FIELD: _START_DATE.strftime(TIME_FORMAT),
_REPLICATION_END_FIELD: _END_DATE.strftime(TIME_FORMAT),
}
query_params_incremental_read = {
_REPLICATION_START_FIELD: state_value, _REPLICATION_END_FIELD: _END_DATE.strftime(TIME_FORMAT)
}
query_params_incremental_read = {_REPLICATION_START_FIELD: state_value, _REPLICATION_END_FIELD: _END_DATE.strftime(TIME_FORMAT)}

http_mocker.get(
_vendor_direct_fulfillment_shipping_request().with_query_params(query_params_first_read).build(),
_vendor_direct_fulfillment_shipping_response().with_record(_shipping_label_record()).with_record(
_shipping_label_record()
).build(),
_vendor_direct_fulfillment_shipping_response()
.with_record(_shipping_label_record())
.with_record(_shipping_label_record())
.build(),
)
http_mocker.get(
_vendor_direct_fulfillment_shipping_request().with_query_params(query_params_incremental_read).build(),
_vendor_direct_fulfillment_shipping_response().with_record(_shipping_label_record()).with_record(
_shipping_label_record()
).build(),
_vendor_direct_fulfillment_shipping_response()
.with_record(_shipping_label_record())
.with_record(_shipping_label_record())
.build(),
)

output = self._read(