Skip to content

Commit

Permalink
Merge 761f62d into 375794e
Browse files Browse the repository at this point in the history
  • Loading branch information
codedawi committed May 20, 2020
2 parents 375794e + 761f62d commit cdcb77f
Show file tree
Hide file tree
Showing 3 changed files with 66 additions and 12 deletions.
46 changes: 39 additions & 7 deletions app/services/location/jhu.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,22 +142,31 @@ async def get_locations():
# Get all of the data categories locations.
confirmed = await get_category("confirmed")
deaths = await get_category("deaths")
# recovered = await get_category("recovered")
recovered = await get_category("recovered")

locations_confirmed = confirmed["locations"]
locations_deaths = deaths["locations"]
# locations_recovered = recovered["locations"]
locations_recovered = recovered["locations"]

# Final locations to return.
locations = []

# ***************************************************************************
# TODO: This iteration approach assumes the indexes remain the same
# and opens us to a CRITICAL ERROR. The removal of a column in the data source
# would break the API or SHIFT all the data confirmed, deaths, recovery producing
# incorrect data to consumers.
# ***************************************************************************
# Go through locations.
for index, location in enumerate(locations_confirmed):
# Get the timelines.

# TEMP: Fix for merging recovery data. See TODO above for more details.
key = (location["country"], location["province"])

timelines = {
"confirmed": locations_confirmed[index]["history"],
"deaths": locations_deaths[index]["history"],
# 'recovered' : locations_recovered[index]['history'],
"confirmed": location["history"],
"deaths": parse_history(key, locations_deaths, index),
"recovered": parse_history(key, locations_recovered, index),
}

# Grab coordinates.
Expand Down Expand Up @@ -188,11 +197,34 @@ async def get_locations():
for date, amount in timelines["deaths"].items()
}
),
"recovered": Timeline({}),
"recovered": Timeline(
{
datetime.strptime(date, "%m/%d/%y").isoformat() + "Z": amount
for date, amount in timelines["recovered"].items()
}
),
},
)
)
LOGGER.info(f"{data_id} Data normalized")

# Finally, return the locations.
return locations


def parse_history(key: tuple, locations: list, index: int) -> dict:
    """
    Return the ``history`` dict for the location at ``index`` in ``locations``,
    but only when that entry's (country, province) pair matches ``key``.

    This guards against the index/column drift described in the TODO above:
    if the index is out of range, the entry is malformed, or the keys do not
    match, an empty history is returned instead of silently merging data
    from the wrong location.

    TEMP: stop-gap until a more efficient, key-based merge is implemented
    in the refactor.

    :param key: ``(country, province)`` tuple taken from the confirmed data.
    :param locations: list of location dicts for another data category.
    :param index: position in ``locations`` expected to correspond to ``key``.
    :returns: the matching location's ``history`` dict, or ``{}`` on any mismatch.
    """
    location_history = {}
    try:
        # Hoist the indexed lookup so the entry is fetched only once.
        location = locations[index]
        if key == (location["country"], location["province"]):
            location_history = location["history"]
    except (IndexError, KeyError):
        # Lazy %-args so the message is only formatted when DEBUG is enabled.
        LOGGER.debug("iteration data merge error: %s %s", index, key)

    return location_history
24 changes: 24 additions & 0 deletions tests/test_jhu.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,27 @@ async def test_get_locations(mock_client_session):
# `jhu.get_locations()` creates id based on confirmed list
location_confirmed = await jhu.get_category("confirmed")
assert len(output) == len(location_confirmed["locations"])

# `jhu.get_locations()` creates id based on deaths list
location_deaths = await jhu.get_category("deaths")
assert len(output) == len(location_deaths["locations"])

# `jhu.get_locations()` creates id based on recovered list
location_recovered = await jhu.get_category("recovered")
assert len(output) == len(location_recovered["locations"])


@pytest.mark.parametrize(
    "key, locations, index, expected",
    [
        (("Thailand", "TH"), [{"country": "Thailand", "province": "TH", "history": {"test": "yes"}}], 0, {"test": "yes"}), # Success: index and key both match
        (("Deutschland", "DE"), [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}], 1, {}), # IndexError: index out of range
        (("US", "NJ"), [{"country": "Deutschland", "province": "DE", "history": {"test": "no"}}], 0, {}), # Invalid key: country/province mismatch at index
    ],
)
def test_parse_history(key, locations, index, expected):
    """
    Test `jhu.parse_history`: the history dict is returned only when the
    (country, province) key matches the entry at `index`; any index or
    key mismatch yields an empty dict.
    """
    assert jhu.parse_history(key, locations, index) == expected
8 changes: 3 additions & 5 deletions tests/test_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,8 +112,7 @@ async def test_v2_locations(self):
with open(filepath, "r") as file:
expected_json_output = file.read()

# TODO: Why is this failing?
# assert return_data == json.loads(expected_json_output)
assert return_data == json.loads(expected_json_output)

async def test_v2_locations_id(self):
state = "locations"
Expand All @@ -130,8 +129,7 @@ async def test_v2_locations_id(self):
with open(filepath, "r") as file:
expected_json_output = file.read()

# TODO: Why is this failing?
# assert return_data == expected_json_output
assert return_data == json.loads(expected_json_output)


@pytest.mark.asyncio
Expand Down Expand Up @@ -183,4 +181,4 @@ async def test_latest(async_api_client, query_params, mock_client_session):

assert response.status_code == 200
assert response_json["latest"]["confirmed"]
assert response_json["latest"]["deaths"]
assert response_json["latest"]["deaths"]

0 comments on commit cdcb77f

Please sign in to comment.