#
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#

import datetime
import json
import logging
import pkgutil
import uuid
from abc import ABC
from http import HTTPStatus
from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple

import dpath
import jsonschema
import pendulum
import requests
from airbyte_cdk.models import FailureType, SyncMode
from airbyte_cdk.sources import AbstractSource
from airbyte_cdk.sources.streams import Stream
from airbyte_cdk.sources.streams.http import HttpStream
from airbyte_cdk.utils import AirbyteTracedException
from requests import HTTPError
from source_google_analytics_data_api import utils
from source_google_analytics_data_api.utils import DATE_FORMAT, WRONG_DIMENSIONS, WRONG_JSON_SYNTAX, WRONG_METRICS

from .api_quota import GoogleAnalyticsApiQuota
from .utils import (
    authenticator_class_map,
    check_invalid_property_error,
    check_no_property_error,
    get_dimensions_type,
    get_metrics_type,
    get_source_defined_primary_key,
    metrics_type_to_python,
)

# Set the quota handler globally, since the limitations are the same for all streams.
# The initial values are saved once and then tracked for each stream, inclusively.
GoogleAnalyticsQuotaHandler: GoogleAnalyticsApiQuota = GoogleAnalyticsApiQuota()

LOOKBACK_WINDOW = datetime.timedelta(days=2)


class ConfigurationError(Exception):
    pass


class MetadataDescriptor:
    def __init__(self):
        self._metadata = None

    def __get__(self, instance, owner):
        if not self._metadata:
            stream = GoogleAnalyticsDataApiMetadataStream(config=instance.config, authenticator=instance.config["authenticator"])
            try:
                metadata = next(stream.read_records(sync_mode=SyncMode.full_refresh), None)
            except HTTPError as e:
                if e.response.status_code == HTTPStatus.UNAUTHORIZED:
                    internal_message = "Unauthorized error reached."
                    message = "Cannot get metadata with unauthorized credentials. Try re-authenticating in the source settings."
                    unauthorized_error = AirbyteTracedException(
                        message=message, internal_message=internal_message, failure_type=FailureType.config_error
                    )
                    raise unauthorized_error
                raise  # re-raise other HTTP errors instead of silently leaving `metadata` unset
            if not metadata:
                raise Exception("Failed to get metadata: over quota, try again later.")
            self._metadata = {
                "dimensions": {m.get("apiName"): m for m in metadata.get("dimensions", [{}])},
                "metrics": {m.get("apiName"): m for m in metadata.get("metrics", [{}])},
            }
        return self._metadata
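

# Illustrative sketch (not part of the original connector): MetadataDescriptor above
# caches the metadata payload on the descriptor instance, so repeated attribute
# access triggers at most one API call. A minimal, self-contained model of that
# caching pattern, with a stubbed fetch standing in for the real HTTP request:
def _example_metadata_caching() -> None:
    class _CachedField:
        """Descriptor that computes a value lazily on first access and caches it."""

        def __init__(self, fetch):
            self._fetch = fetch  # hypothetical fetch callable
            self._value = None

        def __get__(self, instance, owner):
            if self._value is None:
                self._value = self._fetch()
            return self._value

    class _Stream:
        metadata = _CachedField(lambda: {"dimensions": {}, "metrics": {}})

    first, second = _Stream().metadata, _Stream().metadata
    assert first is second  # the fetch ran once; the cached dict is shared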


class GoogleAnalyticsDataApiAbstractStream(HttpStream, ABC):
    url_base = "https://analyticsdata.googleapis.com/v1beta/"
    http_method = "POST"
    raise_on_http_errors = True

    def __init__(self, *, config: Mapping[str, Any], page_size: int = 100_000, **kwargs):
        super().__init__(**kwargs)
        self._config = config
        self._source_defined_primary_key = get_source_defined_primary_key(self.name)
        # The default value is 100,000: the maximum page size stated in the official documentation.
        # https://developers.google.com/analytics/devguides/reporting/data/v1/basics#pagination
        self._page_size = page_size

    @property
    def config(self):
        return self._config

    @property
    def page_size(self):
        return self._page_size

    @page_size.setter
    def page_size(self, value: int):
        self._page_size = value

    # Handle quota errors with prepared values for:
    # `should_retry`, `backoff_time`, `raise_on_http_errors`, `stop_iter`, based on the quota scenario.
    @GoogleAnalyticsQuotaHandler.handle_quota()
    def should_retry(self, response: requests.Response) -> bool:
        if response.status_code == requests.codes.too_many_requests:
            setattr(self, "raise_on_http_errors", GoogleAnalyticsQuotaHandler.raise_on_http_errors)
            return GoogleAnalyticsQuotaHandler.should_retry
        # for all other cases not covered by GoogleAnalyticsQuotaHandler
        return super().should_retry(response)

    def backoff_time(self, response: requests.Response) -> Optional[float]:
        # handle the error with the prepared GoogleAnalyticsQuotaHandler backoff value
        if response.status_code == requests.codes.too_many_requests:
            return GoogleAnalyticsQuotaHandler.backoff_time
        # for all other cases not covered by GoogleAnalyticsQuotaHandler
        return super().backoff_time(response)
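

# Illustrative sketch (not part of the original connector): because the quota handler
# is a module-level singleton, every stream consults the same shared state on HTTP 429.
# A simplified model of the should_retry/backoff decision flow above:
def _example_quota_decision(status_code: int) -> str:
    """Mirrors the branching in should_retry/backoff_time (hypothetical helper)."""
    if status_code == requests.codes.too_many_requests:
        # 429: defer to the shared GoogleAnalyticsQuotaHandler state
        return "quota handler decides retry/backoff/stop_iter"
    # anything else: fall back to the CDK defaults
    return "CDK default should_retry/backoff_time"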


class GoogleAnalyticsDataApiBaseStream(GoogleAnalyticsDataApiAbstractStream):
    """
    https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/runReport
    """

    _record_date_format = "%Y%m%d"
    offset = 0

    metadata = MetadataDescriptor()

    @property
    def cursor_field(self) -> Optional[str]:
        return "date" if "date" in self.config.get("dimensions", []) else []

    @property
    def primary_key(self):
        pk = ["property_id"] + self.config.get("dimensions", [])
        if "cohort_spec" not in self.config and "date" not in pk:
            pk.append("startDate")
            pk.append("endDate")
        return pk

    @staticmethod
    def add_dimensions(dimensions, row) -> dict:
        return dict(zip(dimensions, [v["value"] for v in row["dimensionValues"]]))

    @staticmethod
    def add_metrics(metrics, metric_types, row) -> dict:
        def _metric_type_to_python(metric_data: Tuple[str, str]) -> Any:
            metric_name, metric_value = metric_data
            python_type = metrics_type_to_python(metric_types[metric_name])
            return metric_name, python_type(metric_value)

        return dict(map(_metric_type_to_python, zip(metrics, [v["value"] for v in row["metricValues"]])))
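
    # Illustrative example (hypothetical row, mirroring the runReport response shape):
    # given dimensions=["date"], metrics=["totalUsers"], metric_types={"totalUsers": "TYPE_INTEGER"}
    # and row={"dimensionValues": [{"value": "20230101"}], "metricValues": [{"value": "42"}]},
    # add_dimensions returns {"date": "20230101"} and add_metrics returns {"totalUsers": 42}
    # (assuming metrics_type_to_python maps "TYPE_INTEGER" to int).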

    def get_json_schema(self) -> Mapping[str, Any]:
        """
        Override the get_json_schema CDK method to retrieve the schema information for the GoogleAnalyticsV4 object dynamically.
        """
        schema: Dict[str, Any] = {
            "$schema": "https://json-schema.org/draft-07/schema#",
            "type": ["null", "object"],
            "additionalProperties": True,
            "properties": {
                "property_id": {"type": ["string"]},
            },
        }

        schema["properties"].update(
            {
                d.replace(":", "_"): {
                    "type": get_dimensions_type(d),
                    "description": self.metadata["dimensions"].get(d, {}).get("description", d),
                }
                for d in self.config["dimensions"]
            }
        )
        # Skip the startDate and endDate fields for the cohort stream, because it doesn't support them.
        if "cohort_spec" not in self.config and "date" not in self.config["dimensions"]:
            schema["properties"].update(
                {
                    "startDate": {"type": ["null", "string"], "format": "date"},
                    "endDate": {"type": ["null", "string"], "format": "date"},
                }
            )

        schema["properties"].update(
            {
                m.replace(":", "_"): {
                    "type": ["null", get_metrics_type(self.metadata["metrics"].get(m, {}).get("type"))],
                    "description": self.metadata["metrics"].get(m, {}).get("description", m),
                }
                for m in self.config["metrics"]
            }
        )
        return schema

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        r = response.json()
        if "rowCount" in r:
            total_rows = r["rowCount"]

            if self.offset == 0:
                self.offset = self.page_size
            else:
                self.offset += self.page_size

            if total_rows <= self.offset:
                self.offset = 0
                return

            return {"offset": self.offset}

    def path(
        self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
    ) -> str:
        return f"properties/{self.config['property_id']}:runReport"

    def parse_response(
        self,
        response: requests.Response,
        *,
        stream_state: Mapping[str, Any],
        stream_slice: Mapping[str, Any] = None,
        next_page_token: Mapping[str, Any] = None,
    ) -> Iterable[Mapping]:
        r = response.json()

        dimensions = [h.get("name").replace(":", "_") if "name" in h else None for h in r.get("dimensionHeaders", [{}])]
        metrics = [h.get("name").replace(":", "_") if "name" in h else None for h in r.get("metricHeaders", [{}])]
        metrics_type_map = {h.get("name").replace(":", "_"): h.get("type") for h in r.get("metricHeaders", [{}]) if "name" in h}

        for row in r.get("rows", []):
            record = {
                "property_id": self.config["property_id"],
                **self.add_dimensions(dimensions, row),
                **self.add_metrics(metrics, metrics_type_map, row),
            }

            # https://github.com/airbytehq/airbyte/pull/26283
            # We pass the uuid field for synchronizations that still use the old
            # configured_catalog with the old primary key. We need it to avoid removal of rows
            # in the deduplication process. As soon as the customer presses "refresh source schema",
            # this part is no longer needed.
            if self._source_defined_primary_key == [["uuid"]]:
                record["uuid"] = str(uuid.uuid4())

            if "cohort_spec" not in self.config and "date" not in record:
                record["startDate"] = stream_slice["startDate"]
                record["endDate"] = stream_slice["endDate"]
            yield record

    def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]):
        updated_state = utils.string_to_date(latest_record[self.cursor_field], self._record_date_format)
        stream_state_value = current_stream_state.get(self.cursor_field)
        if stream_state_value:
            stream_state_value = utils.string_to_date(stream_state_value, self._record_date_format, old_format=DATE_FORMAT)
            updated_state = max(updated_state, stream_state_value)
        current_stream_state[self.cursor_field] = updated_state.strftime(self._record_date_format)
        return current_stream_state
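
    # Illustrative example (sketch): with stored state {"date": "20230110"} and a
    # late-arriving record {"date": "20230105"}, the max() above keeps the cursor at
    # "20230110" - the state only ever moves forward.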

    def request_body_json(
        self,
        stream_state: Mapping[str, Any],
        stream_slice: Mapping[str, Any] = None,
        next_page_token: Mapping[str, Any] = None,
    ) -> Optional[Mapping]:
        payload = {
            "metrics": [{"name": m} for m in self.config["metrics"]],
            "dimensions": [{"name": d} for d in self.config["dimensions"]],
            "dateRanges": [stream_slice],
            "returnPropertyQuota": True,
            "offset": str(0),
            "limit": str(self.page_size),
        }
        if next_page_token and next_page_token.get("offset") is not None:
            payload.update({"offset": str(next_page_token["offset"])})
        return payload

    def stream_slices(
        self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
    ) -> Iterable[Optional[Mapping[str, Any]]]:
        today: datetime.date = datetime.date.today()

        start_date = stream_state and stream_state.get(self.cursor_field)
        if start_date:
            start_date = utils.string_to_date(start_date, self._record_date_format, old_format=DATE_FORMAT)
            start_date -= LOOKBACK_WINDOW
            start_date = max(start_date, self.config["date_ranges_start_date"])
        else:
            start_date = self.config["date_ranges_start_date"]

        while start_date <= today:
            # Stop producing slices if a 429 with the specific quota scenario is hit;
            # see GoogleAnalyticsQuotaHandler for more info.
            if GoogleAnalyticsQuotaHandler.stop_iter:
                return []
            else:
                yield {
                    "startDate": utils.date_to_string(start_date),
                    "endDate": utils.date_to_string(min(start_date + datetime.timedelta(days=self.config["window_in_days"] - 1), today)),
                }
                start_date += datetime.timedelta(days=self.config["window_in_days"])
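

# Illustrative sketch (not part of the original connector): the slicing logic above
# walks from the start date to today in window_in_days steps, producing inclusive,
# non-overlapping date ranges. A self-contained replay of that arithmetic,
# assuming utils.date_to_string produces "YYYY-MM-DD":
def _example_stream_slices() -> list:
    start = datetime.date(2023, 1, 1)
    today = datetime.date(2023, 1, 4)
    window_in_days = 2

    slices = []
    while start <= today:
        slices.append(
            {
                "startDate": start.isoformat(),
                "endDate": min(start + datetime.timedelta(days=window_in_days - 1), today).isoformat(),
            }
        )
        start += datetime.timedelta(days=window_in_days)
    return slices
    # [{"startDate": "2023-01-01", "endDate": "2023-01-02"},
    #  {"startDate": "2023-01-03", "endDate": "2023-01-04"}]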


class PivotReport(GoogleAnalyticsDataApiBaseStream):
    def request_body_json(
        self,
        stream_state: Mapping[str, Any],
        stream_slice: Mapping[str, Any] = None,
        next_page_token: Mapping[str, Any] = None,
    ) -> Optional[Mapping]:
        payload = super().request_body_json(stream_state, stream_slice, next_page_token)

        # Remove the offset and limit fields, since they are not part of
        # https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/runPivotReport
        payload.pop("offset", None)
        payload.pop("limit", None)

        payload["pivots"] = self.config["pivots"]
        return payload

    def path(
        self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
    ) -> str:
        return f"properties/{self.config['property_id']}:runPivotReport"


class CohortReportMixin:
    cursor_field = []

    def stream_slices(
        self, *, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
    ) -> Iterable[Optional[Mapping[str, Any]]]:
        yield from [None]

    def request_body_json(
        self,
        stream_state: Mapping[str, Any],
        stream_slice: Mapping[str, Any] = None,
        next_page_token: Mapping[str, Any] = None,
    ) -> Optional[Mapping]:
        # https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/CohortSpec#Cohort.FIELDS.date_range
        # In a cohort request, this dateRange is required and the dateRanges in the RunReportRequest or RunPivotReportRequest
        # must be unspecified.
        payload = super().request_body_json(stream_state, stream_slice, next_page_token)
        payload.pop("dateRanges")
        payload["cohortSpec"] = self.config["cohort_spec"]
        return payload


class GoogleAnalyticsDataApiMetadataStream(GoogleAnalyticsDataApiAbstractStream):
    """
    https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/properties/getMetadata
    """

    primary_key = None
    http_method = "GET"

    def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]:
        return None

    def path(
        self, *, stream_state: Mapping[str, Any] = None, stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None
    ) -> str:
        return f"properties/{self.config['property_id']}/metadata"

    def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]:
        yield response.json()


class SourceGoogleAnalyticsDataApi(AbstractSource):
    @property
    def default_date_ranges_start_date(self) -> str:
        # set the default date ranges start date to 2 years ago
        return pendulum.now(tz="UTC").subtract(years=2).format("YYYY-MM-DD")

    def _validate_and_transform_start_date(self, start_date: str) -> datetime.date:
        start_date = self.default_date_ranges_start_date if not start_date else start_date
        try:
            start_date = utils.string_to_date(start_date)
        except ValueError as e:
            raise ConfigurationError(str(e))
        return start_date
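
    # Illustrative example (sketch): an empty start date falls back to two years ago
    # (e.g. "" -> "2021-07-01" for a run on 2023-07-01), while a malformed value such
    # as "01-2023" is surfaced as a ConfigurationError, assuming utils.string_to_date
    # expects the "YYYY-MM-DD" format.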

    def _validate_and_transform(self, config: Mapping[str, Any], report_names: Set[str]):
        if "custom_reports" in config:
            if isinstance(config["custom_reports"], str):
                try:
                    config["custom_reports"] = json.loads(config["custom_reports"])
                    if not isinstance(config["custom_reports"], list):
                        raise ValueError
                except ValueError:
                    raise ConfigurationError(WRONG_JSON_SYNTAX)
        else:
            config["custom_reports"] = []

        schema = json.loads(pkgutil.get_data("source_google_analytics_data_api", "defaults/custom_reports_schema.json"))
        try:
            jsonschema.validate(instance=config["custom_reports"], schema=schema)
        except jsonschema.ValidationError as e:
            if message := check_no_property_error(e):
                raise ConfigurationError(message)
            if message := check_invalid_property_error(e):
                report_name = dpath.util.get(config["custom_reports"], str(e.absolute_path[0])).get("name")
                raise ConfigurationError(message.format(fields=e.message, report_name=report_name))

            key_path = "custom_reports"
            if e.path:
                key_path += "." + ".".join(map(str, e.path))
            raise ConfigurationError(f"{key_path}: {e.message}")

        existing_names = {r["name"] for r in config["custom_reports"]} & report_names
        if existing_names:
            existing_names = ", ".join(existing_names)
            raise ConfigurationError(f"custom_reports: {existing_names} already exist as default reports.")

        if "credentials_json" in config["credentials"]:
            try:
                config["credentials"]["credentials_json"] = json.loads(config["credentials"]["credentials_json"])
            except ValueError:
                raise ConfigurationError("credentials.credentials_json is not valid JSON")

        config["date_ranges_start_date"] = self._validate_and_transform_start_date(config.get("date_ranges_start_date"))

        if not config.get("window_in_days"):
            source_spec = self.spec(logging.getLogger("airbyte"))
            config["window_in_days"] = source_spec.connectionSpecification["properties"]["window_in_days"]["default"]

        return config

    def get_authenticator(self, config: Mapping[str, Any]):
        credentials = config["credentials"]
        authenticator_class, get_credentials = authenticator_class_map[credentials["auth_type"]]
        return authenticator_class(**get_credentials(credentials))

    def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]:
        for property_id in config["property_ids"]:
            reports = json.loads(pkgutil.get_data("source_google_analytics_data_api", "defaults/default_reports.json"))
            try:
                config = self._validate_and_transform(config, report_names={r["name"] for r in reports})
            except ConfigurationError as e:
                return False, str(e)
            config["authenticator"] = self.get_authenticator(config)

            _config = config.copy()
            _config["property_id"] = property_id

            metadata = None
            try:
                # explicitly set a small page size for the check operation so it doesn't cause OOM issues
                stream = GoogleAnalyticsDataApiMetadataStream(config=_config, authenticator=_config["authenticator"])
                metadata = next(stream.read_records(sync_mode=SyncMode.full_refresh), None)
            except HTTPError as e:
                error_list = [HTTPStatus.BAD_REQUEST, HTTPStatus.FORBIDDEN]
                if e.response.status_code in error_list:
                    internal_message = f"Incorrect Property ID: {property_id}"
                    property_id_docs_url = (
                        "https://developers.google.com/analytics/devguides/reporting/data/v1/property-id#what_is_my_property_id"
                    )
                    message = f"Access was denied to the property ID entered. Check your access to the Property ID, or see {property_id_docs_url} to find your Property ID."
                    wrong_property_id_error = AirbyteTracedException(
                        message=message, internal_message=internal_message, failure_type=FailureType.config_error
                    )
                    raise wrong_property_id_error

            if not metadata:
                return False, "Failed to get metadata: over quota, try again later."

            dimensions = {d["apiName"] for d in metadata["dimensions"]}
            metrics = {d["apiName"] for d in metadata["metrics"]}

            for report in _config["custom_reports"]:
                # Check that the custom report dimensions are supported by comparing them with the dimensions provided by the GA API.
                invalid_dimensions = set(report["dimensions"]) - dimensions
                if invalid_dimensions:
                    invalid_dimensions = ", ".join(invalid_dimensions)
                    return False, WRONG_DIMENSIONS.format(fields=invalid_dimensions, report_name=report["name"])
                # Check that the custom report metrics are supported by comparing them with the metrics provided by the GA API.
                invalid_metrics = set(report["metrics"]) - metrics
                if invalid_metrics:
                    invalid_metrics = ", ".join(invalid_metrics)
                    return False, WRONG_METRICS.format(fields=invalid_metrics, report_name=report["name"])

                report_stream = self.instantiate_report_class(report, _config, page_size=100)
                # Check that the custom report's dimensions + metrics can be combined and a report generated.
                stream_slice = next(report_stream.stream_slices(sync_mode=SyncMode.full_refresh))
                next(report_stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice), None)
        return True, None

    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
        reports = json.loads(pkgutil.get_data("source_google_analytics_data_api", "defaults/default_reports.json"))
        config = self._validate_and_transform(config, report_names={r["name"] for r in reports})
        config["authenticator"] = self.get_authenticator(config)
        return [stream for report in reports + config["custom_reports"] for stream in self.instantiate_report_streams(report, config)]

    def instantiate_report_streams(
        self, report: dict, config: Mapping[str, Any], **extra_kwargs
    ) -> Iterable[GoogleAnalyticsDataApiBaseStream]:
        for property_id in config["property_ids"]:
            yield self.instantiate_report_class(report=report, config={**config, "property_id": property_id})

    def instantiate_report_class(self, report: dict, config: Mapping[str, Any], **extra_kwargs) -> GoogleAnalyticsDataApiBaseStream:
        cohort_spec = report.get("cohortSpec")
        pivots = report.get("pivots")
        stream_config = {
            **config,
            "metrics": report["metrics"],
            "dimensions": report["dimensions"],
        }
        report_class_tuple = (GoogleAnalyticsDataApiBaseStream,)
        if pivots:
            stream_config["pivots"] = pivots
            report_class_tuple = (PivotReport,)
        if cohort_spec:
            stream_config["cohort_spec"] = cohort_spec
            report_class_tuple = (CohortReportMixin, *report_class_tuple)
        return type(report["name"], report_class_tuple, {})(config=stream_config, authenticator=config["authenticator"], **extra_kwargs)
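

# Illustrative sketch (not part of the original connector): instantiate_report_class
# builds a per-report Stream subclass at runtime with type(), prepending mixins so
# that, e.g., CohortReportMixin.request_body_json overrides the base implementation.
# A minimal model of that MRO behaviour with hypothetical stand-in classes:
def _example_dynamic_stream_class() -> tuple:
    class _Base:
        kind = "report"

    class _CohortMixin:
        kind = "cohort"

    plain = type("MyReport", (_Base,), {})
    cohort = type("MyCohortReport", (_CohortMixin, _Base), {})
    # The mixin comes first in the bases tuple, so its attributes win in the MRO.
    return plain().kind, cohort().kind  # ("report", "cohort")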