Skip to content

Commit

Permalink
feat: added unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
cka-y committed Apr 10, 2024
1 parent 3e4ecfc commit 2b1d9d8
Show file tree
Hide file tree
Showing 16 changed files with 274 additions and 206 deletions.
2 changes: 1 addition & 1 deletion functions-python/extract_bb/src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def get_gtfs_feed_bounds(url: str, dataset_id: str) -> numpy.ndarray:
feed = gtfs_kit.read_feed(url, "km")
return feed.compute_bounds()
except Exception as e:
print(f"[{dataset_id}] Error retrieving GTFS feed from {url}: {e}")
logging.error(f"[{dataset_id}] Error retrieving GTFS feed from {url}: {e}")
raise Exception(e)


Expand Down
3 changes: 2 additions & 1 deletion functions-python/extract_bb/tests/test_extract_bb.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,8 @@ def test_get_gtfs_feed_bounds(self, mock_gtfs_kit):
for i in range(4):
self.assertEqual(bounds[i], expected_bounds[i])

def test_extract_bb_exception(self):
@patch("extract_bb.src.main.Logger")
def test_extract_bb_exception(self, _):
file_name = faker.file_name()
resource_name = (
f"{faker.uri_path()}/{faker.pystr()}/{faker.pystr()}/{file_name}"
Expand Down
9 changes: 9 additions & 0 deletions functions-python/validation_report_processor/.coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
[run]
omit =
*/test*/*
*/helpers/*
*/database_gen/*

[report]
exclude_lines =
if __name__ == .__main__.:
2 changes: 2 additions & 0 deletions functions-python/validation_report_processor/.env.rename_me
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Environment variables for the validation report information extraction to run locally
export FEEDS_DATABASE_URL=${{FEEDS_DATABASE_URL}}
2 changes: 0 additions & 2 deletions functions-python/validation_report_processor/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
functions-framework==3.*
google-cloud-pubsub
google-cloud-logging
google-cloud-datastore
psycopg2-binary==2.9.6
aiohttp~=3.8.6
asyncio~=3.4.3
Expand Down
7 changes: 0 additions & 7 deletions functions-python/validation_report_processor/src/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +0,0 @@
import sys

sys.path.append("..")

import os

print(os.getcwd())
45 changes: 35 additions & 10 deletions functions-python/validation_report_processor/src/main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#
# MobilityData 2023
# MobilityData 2024
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -15,6 +15,7 @@
#

import os
import logging
from datetime import datetime
import requests
import functions_framework
Expand All @@ -25,6 +26,9 @@
Gtfsdataset,
)
from helpers.database import start_db_session, close_db_session
from helpers.logger import Logger

logging.basicConfig(level=logging.INFO)

FILES_ENDPOINT = os.getenv("FILES_ENDPOINT")

Expand All @@ -37,7 +41,7 @@ def read_json_report(json_report_url):
:return: Dict representation of the JSON report
"""
response = requests.get(json_report_url)
return response.json()
return response.json(), response.status_code


def get_feature(feature_name, session):
Expand Down Expand Up @@ -85,16 +89,23 @@ def create_validation_report_entities(feed_stable_id, dataset_stable_id):
json_report_url = (
f"{FILES_ENDPOINT}/{feed_stable_id}/{dataset_stable_id}/report.json"
)
logging.info(f"Accessing JSON report at {json_report_url}.")
try:
json_report = read_json_report(json_report_url)
except Exception as error:
json_report, code = read_json_report(json_report_url)
if code != 200:
logging.error(f"Error reading JSON report: {code}")
return f"Error reading JSON report at url {json_report_url}.", code
except Exception as error: # JSONDecodeError or RequestException
print(f"Error reading JSON report: {error}")
return f"Error reading JSON report: {error}", 500
return f"Error reading JSON report at url {json_report_url}: {error}", 500

try:
dt = json_report["summary"]["validatedAt"]
validated_at = datetime.fromisoformat(dt.replace("Z", "+00:00"))
version = json_report["summary"]["validatorVersion"]
logging.info(
f"Validation report validated at {validated_at} with version {version}."
)
except Exception as error:
print(f"Error parsing JSON report: {error}")
return f"Error parsing JSON report: {error}", 500
Expand All @@ -104,16 +115,17 @@ def create_validation_report_entities(feed_stable_id, dataset_stable_id):
f"{FILES_ENDPOINT}/{feed_stable_id}/{dataset_stable_id}/report.html"
)

print(f"Creating validation report entities for {report_id}.")
print(f"JSON report URL: {json_report_url}")
print(f"HTML report URL: {html_report_url}")

logging.info(f"Creating validation report entities for {report_id}.")
session = None
try:
session = start_db_session(os.getenv("FEEDS_DATABASE_URL"))
logging.info("Database session started.")

# Validation Report Entity
if get_validation_report(report_id, session): # Check if report already exists
logging.info(f"Validation report {report_id} already exists. Terminating.")
return f"Validation report {report_id} already exists.", 409

validation_report_entity = Validationreport(
id=report_id,
validator_version=version,
Expand All @@ -139,13 +151,16 @@ def create_validation_report_entities(feed_stable_id, dataset_stable_id):
entities.append(notice_entity)
for entity in entities:
session.add(entity)
logging.info(f"Committing {len(entities)} entities to the database.")
session.commit()
logging.info("Entities committed successfully.")
return f"Created {len(entities)} entities.", 200
except Exception as error:
print(f"Error creating validation report entities: {error}")
return f"Error creating validation report entities: {error}", 500
finally:
close_db_session(session)
logging.info("Database session closed.")


def get_validation_report(report_id, session):
Expand All @@ -163,14 +178,24 @@ def process_validation_report(request):
:param request: Request object containing 'dataset_id' and 'feed_id'
:return: HTTP response indicating the result of the operation
"""
Logger.init_logger()
request_json = request.get_json(silent=True)
logging.info(
f"Processing validation report function called with request: {request_json}"
)
if (
not request_json
or "dataset_id" not in request_json
or "feed_id" not in request_json
):
return "Invalid request", 400
return (
f"Invalid request body: {request_json}. We expect 'dataset_id' and 'feed_id' to be present.",
400,
)

dataset_id = request_json["dataset_id"]
feed_id = request_json["feed_id"]
logging.info(
f"Processing validation report for dataset {dataset_id} in feed {feed_id}."
)
return create_validation_report_entities(feed_id, dataset_id)
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,197 @@
import os
import unittest
from unittest import mock
from unittest.mock import MagicMock, patch

from faker import Faker

from database_gen.sqlacodegen_models import Feature
from database_gen.sqlacodegen_models import Gtfsdataset, Gtfsfeed, Validationreport
from helpers.database import start_db_session
from test_utils.database_utils import default_db_url
from validation_report_processor.src.main import (
read_json_report,
get_feature,
get_dataset,
create_validation_report_entities,
process_validation_report,
)

faker = Faker()


class TestValidationReportProcessor(unittest.TestCase):
    """Unit tests for the validation report processor entry points."""

    @mock.patch("requests.get")
    def test_read_json_report_success(self, mock_get):
        """read_json_report returns both the parsed JSON body and the HTTP status."""
        mock_get.return_value = MagicMock(
            status_code=200, json=lambda: {"key": "value"}
        )
        json_report_url = "http://example.com/report.json"
        result, status = read_json_report(json_report_url)

        self.assertEqual(result, {"key": "value"})
        self.assertEqual(status, 200)
        mock_get.assert_called_once_with(json_report_url)

    @mock.patch("requests.get")
    def test_read_json_report_failure(self, mock_get):
        """read_json_report propagates exceptions raised by requests.get.

        Fix: the original chained assignment
        ``mock_get.return_value = MagicMock(status_code=404).side_effect = Exception(...)``
        bound ``return_value`` to the Exception *instance*, so the assertion
        only passed via an accidental AttributeError when the code touched
        ``response.json``. Setting ``side_effect`` makes the mocked
        ``requests.get`` genuinely raise, which is what the test intends.
        """
        mock_get.side_effect = Exception("404 Not Found")
        json_report_url = "http://example.com/nonexistent.json"

        with self.assertRaises(Exception):
            read_json_report(json_report_url)

    def test_get_feature(self):
        """get_feature creates a Feature and returns the same instance once it
        is present in the session."""
        session = start_db_session(default_db_url)
        try:
            feature_name = faker.word()
            feature = get_feature(feature_name, session)
            session.add(feature)
            same_feature = get_feature(feature_name, session)

            self.assertIsInstance(feature, Feature)
            self.assertEqual(feature.name, feature_name)
            self.assertEqual(feature, same_feature)
        finally:
            # Nothing was committed; roll back so the test leaves no rows
            # behind, and close even if an assertion above failed.
            session.rollback()
            session.close()

    def test_get_dataset(self):
        """get_dataset returns None for an unknown stable id, and the matching
        latest dataset once one exists in the session."""
        session = start_db_session(default_db_url)
        try:
            dataset_stable_id = faker.word()
            self.assertIsNone(get_dataset(dataset_stable_id, session))

            # Create a GTFS feed and a latest dataset attached to it.
            feed = Gtfsfeed(id=faker.word(), data_type="gtfs", stable_id=faker.word())
            dataset = Gtfsdataset(
                id=faker.word(),
                feed_id=feed.id,
                stable_id=dataset_stable_id,
                latest=True,
            )
            session.add(feed)
            session.add(dataset)
            returned_dataset = get_dataset(dataset_stable_id, session)
            self.assertIsNotNone(returned_dataset)
            self.assertEqual(returned_dataset, dataset)
        finally:
            # The original rolled back and closed twice (once in ``except``
            # before re-raising, once in ``finally``); a single ``finally``
            # covers both the failure and success paths.
            session.rollback()
            session.close()

    @mock.patch.dict(os.environ, {"FEEDS_DATABASE_URL": default_db_url})
    @mock.patch("requests.get")
    def test_create_validation_report_entities(self, mock_get):
        """A 200 JSON report produces a Validationreport row whose id is
        ``<dataset_stable_id>_<validatorVersion>``."""
        mock_get.return_value = MagicMock(
            status_code=200,
            json=lambda: {
                "summary": {
                    "validatedAt": "2021-01-01T00:00:00Z",
                    "validatorVersion": "1.0",
                    "gtfsFeatures": ["stops", "routes"],
                },
                "notices": [
                    {"code": "notice_code", "severity": "ERROR", "totalNotices": 1}
                ],
            },
        )
        feed_stable_id = faker.word()
        dataset_stable_id = faker.word()

        # The function under test looks the dataset up by stable id, so the
        # feed/dataset pair must be committed before it runs.
        feed = Gtfsfeed(id=faker.word(), data_type="gtfs", stable_id=feed_stable_id)
        dataset = Gtfsdataset(
            id=faker.word(),
            feed_id=feed.id,
            stable_id=dataset_stable_id,
            latest=True,
        )
        session = start_db_session(default_db_url)
        try:
            session.add(feed)
            session.add(dataset)
            session.commit()
            create_validation_report_entities(feed_stable_id, dataset_stable_id)

            # Validate that the validation report was created.
            validation_report = (
                session.query(Validationreport)
                .filter(Validationreport.id == f"{dataset_stable_id}_1.0")
                .one_or_none()
            )
            self.assertIsNotNone(validation_report)
        finally:
            # The original ``except Exception as e: raise e`` was a no-op
            # re-raise; ``finally`` alone is sufficient for cleanup.
            session.rollback()
            session.close()

    @mock.patch.dict(os.environ, {"FEEDS_DATABASE_URL": default_db_url})
    @mock.patch("requests.get")
    def test_create_validation_report_entities_json_error1(self, mock_get):
        """A non-200 status from the report endpoint is returned as-is."""
        mock_get.return_value = MagicMock(
            status_code=400,
            json=lambda: {
                "summary": {
                    "validatedAt": "2021-01-01T00:00:00Z",
                    "validatorVersion": "1.0",
                    "gtfsFeatures": ["stops", "routes"],
                },
                "notices": [
                    {"code": "notice_code", "severity": "ERROR", "totalNotices": 1}
                ],
            },
        )
        feed_stable_id = faker.word()
        dataset_stable_id = faker.word()
        _, status = create_validation_report_entities(feed_stable_id, dataset_stable_id)
        self.assertEqual(status, 400)

    @mock.patch.dict(os.environ, {"FEEDS_DATABASE_URL": default_db_url})
    @mock.patch("requests.get")
    def test_create_validation_report_entities_json_error2(self, mock_get):
        """An exception while fetching the report yields a 500 response.

        Fix: the original ``MagicMock().side_effect = Exception(...)`` chained
        assignment set ``mock_get.return_value`` to the Exception instance
        rather than making the call raise; ``side_effect`` on ``mock_get``
        expresses the intended failure directly.
        """
        mock_get.side_effect = Exception("Exception occurred")
        feed_stable_id = faker.word()
        dataset_stable_id = faker.word()
        _, status = create_validation_report_entities(feed_stable_id, dataset_stable_id)
        self.assertEqual(status, 500)

    @patch("validation_report_processor.src.main.Logger")
    @patch("validation_report_processor.src.main.create_validation_report_entities")
    def test_process_validation_report(self, create_validation_report_entities_mock, _):
        """A request body with both ids delegates to the entity-creation helper."""
        request = MagicMock(
            get_json=MagicMock(
                return_value={
                    "dataset_id": faker.word(),
                    "feed_id": faker.word(),
                }
            )
        )
        process_validation_report(request)
        create_validation_report_entities_mock.assert_called_once()

    @patch("validation_report_processor.src.main.Logger")
    @patch("validation_report_processor.src.main.create_validation_report_entities")
    def test_process_validation_report_invalid_request(
        self, create_validation_report_entities_mock, _
    ):
        """A request body missing 'feed_id' is rejected with HTTP 400 and the
        helper is never invoked."""
        request = MagicMock(
            get_json=MagicMock(
                return_value={
                    "dataset_id": faker.word(),
                }
            )
        )
        __, status = process_validation_report(request)
        self.assertEqual(status, 400)
        create_validation_report_entities_mock.assert_not_called()
21 changes: 0 additions & 21 deletions infra/tmp/.terraform.lock.hcl

This file was deleted.

Loading

0 comments on commit 2b1d9d8

Please sign in to comment.