Merged
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "amplitude-data-wrapper"
version = "0.5.1"
version = "0.5.2"
description = "python wrapper for using the amplitude analytics and taxonomy APIs"
authors = [
{name = "Tobias McVey", email = "tobias.mcvey@nav.no"},
23 changes: 10 additions & 13 deletions requirements/dev.txt
@@ -6,7 +6,7 @@
#
appnope==0.1.4
# via ipykernel
-asttokens==2.4.1
+asttokens==3.0.0
# via stack-data
backports-tarfile==1.2.0
# via jaraco-context
@@ -37,12 +37,10 @@ executing==2.1.0
idna==3.10
# via requests
importlib-metadata==8.5.0
-# via
-# keyring
-# twine
+# via keyring
ipykernel==6.29.5
# via amplitude-data-wrapper (pyproject.toml)
-ipython==8.29.0
+ipython==8.30.0
# via ipykernel
isort==5.13.2
# via amplitude-data-wrapper (pyproject.toml)
@@ -84,13 +82,14 @@ mypy-extensions==1.0.0
# mypy
nest-asyncio==1.6.0
# via ipykernel
-nh3==0.2.18
+nh3==0.2.19
# via readme-renderer
packaging==24.2
# via
# black
# build
# ipykernel
+# twine
parso==0.8.4
# via jedi
pathspec==0.12.1
@@ -99,7 +98,7 @@ pexpect==4.9.0
# via ipython
pip-tools==7.4.1
# via amplitude-data-wrapper (pyproject.toml)
-pkginfo==1.10.0
+pkginfo==1.12.0
# via twine
platformdirs==4.3.6
# via
@@ -143,13 +142,11 @@ rfc3986==2.0.0
# via twine
rich==13.9.4
# via twine
-six==1.16.0
-# via
-# asttokens
-# python-dateutil
+six==1.17.0
+# via python-dateutil
stack-data==0.6.3
# via ipython
-tomli==2.1.0
+tomli==2.2.1
# via
# black
# build
@@ -169,7 +166,7 @@ traitlets==5.14.3
# jupyter-client
# jupyter-core
# matplotlib-inline
-twine==5.1.1
+twine==6.0.1
# via amplitude-data-wrapper (pyproject.toml)
typing-extensions==4.12.2
# via
40 changes: 14 additions & 26 deletions src/amplitude_data_wrapper/analytics_api.py
@@ -51,6 +51,7 @@ def get_chart(
auth=(api_key, secret),
proxies=proxy,
)
logging.info("Success. Retrieved data for chart_id %s", chart_id)
return r


@@ -91,6 +92,7 @@ def find_user(
auth=(api_key, secret),
proxies=proxy,
)
logging.info("Success. Found user %s", user)
return r


@@ -148,9 +150,8 @@ def get_cohort(
)
response.raise_for_status()
json_response = response.json()
print("JSON Response")
for key, value in json_response.items():
-print(key, ":", value, "\n")
+logging.info("%s : %s", key, value)
header_status = 0
request_id = json_response["request_id"]
while header_status != 200:
@@ -165,15 +166,11 @@
status_response.raise_for_status()

if status_response.status_code == 202:
print(f"Waiting for {request_id} to be completed. Current status:")
print(f"{status_response.headers}")
json_status = status_response.json()
for key, value in json_status.items():
print(key, ":", value, "\n")
logging.info("Waiting for request_id %s to be completed", request_id)
time.sleep(5)
elif status_response.status_code == 200:
download_url = f"{url[region]}/api/5/cohorts/request/{request_id}/file"
print(f"Downloading from {download_url}")
logging.info("Downloading from %s", download_url)
file_download = s.get(
download_url,
headers=headers,
Expand All @@ -183,20 +180,18 @@ def get_cohort(
proxies=proxy,
)
file_download.raise_for_status()
print(f"{file_download.headers}")
with tqdm.wrapattr(
open(filename, "wb"),
"write",
miniters=1,
total=int(file_download.headers.get("content-length", 0)),
desc=filename,
) as fout:
-print(file_download.headers)
for chunk in file_download.iter_content(chunk_size=8192):
fout.write(chunk)
header_status = 200
else:
-print(
+logging.error(
f"An error occurred, retrying to reach request ID {request_id} and request URL {download_url} in 10 seconds"
)
time.sleep(10)
@@ -261,7 +256,7 @@ def delete_user_data(
auth=(api_key, secret),
proxies=proxy,
)
print(f"Sletter brukere")
logging.info("Sletter brukere")
return r


@@ -363,38 +358,31 @@ def export_project_data(
stream=True,
proxies=proxy,
)
print(f"Export request submitted")
logging.info("Export request submitted")
response.raise_for_status()
header_status = 0
while header_status != 200:
print(f"Waiting for response")
logging.info("Waiting for response")
if response.status_code == 400:
-print(
-f"The file size of the exported data is too large. Shorten the time ranges and try again. The limit size is 4GB."
-)
+logging.info("The file size of the exported data is too large. Shorten the time ranges and try again. The limit size is 4GB.")
elif response.status_code == 404:
-print(
-f"Request data for a time range during which no data has been collected for the project, then you will receive a 404 response from our server."
-)
+logging.info("Request data for a time range during which no data has been collected for the project, then you will receive a 404 response from our server.")
elif response.status_code == 504:
-print(
-f"The amount of data is large causing a timeout. For large amounts of data, the Amazon S3 destination is recommended."
-)
+logging.info("The amount of data is large causing a timeout. For large amounts of data, the Amazon S3 destination is recommended.")
elif response.status_code == 200:
print(f"Success. downloading file as {filename}")
logging.info("Success. Downloading file as %s", filename)
with tqdm.wrapattr(
open(filename, "wb"),
"write",
miniters=1,
total=int(response.headers.get("content-length", 0)),
desc=filename,
) as fout:
-print(response.headers)
for chunk in response.iter_content(chunk_size=8192):
fout.write(chunk)
header_status = 200
else:
print(f"Some other error occurred. Retrying again in 10 seconds.")
logging.error("Some other error occurred. Retrying again in 10 seconds.")
time.sleep(10)
return filename

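Usage note for the print-to-logging migration above: the module now emits its messages through the standard logging calls, so a consumer must configure logging to see them. The snippet below is a minimal sketch, assuming the module logs via the root logger as the logging.info and logging.error calls in the diff suggest; the level and format are illustrative choices, not part of this change.

import logging

# Configure the root logger once in the calling application.
# The wrapper's logging.info / logging.error calls then become visible;
# INFO level and this format string are illustrative assumptions.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(message)s",
)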