-
Notifications
You must be signed in to change notification settings - Fork 25
feat(queries): implement coordinate order validation and UserWarnings for spatial queries (#66) #105
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: develop
Are you sure you want to change the base?
feat(queries): implement coordinate order validation and UserWarnings for spatial queries (#66) #105
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,4 +1,5 @@ | ||
| import inspect | ||
| import pytest | ||
| import os | ||
| from datetime import datetime, timezone, timedelta | ||
| import json | ||
|
|
@@ -30,7 +31,7 @@ class TestGranuleClass(VCRTestCase): # type: ignore | |
| sort_key = "sort_key" | ||
|
|
||
| def _get_vcr_kwargs(self, **kwargs): | ||
| kwargs['decode_compressed_response'] = True | ||
| kwargs["decode_compressed_response"] = True | ||
| return kwargs | ||
|
|
||
| def _get_cassette_library_dir(self): | ||
|
|
@@ -86,21 +87,31 @@ def test_circle_set(self): | |
|
|
||
| def test_revision_date(self): | ||
| query = GranuleQuery() | ||
| granules = query.short_name("SWOT_L2_HR_RiverSP_reach_2.0").revision_date("2024-07-05", "2024-07-05").format( | ||
| "umm_json").get_all() | ||
| granules = ( | ||
| query.short_name("SWOT_L2_HR_RiverSP_reach_2.0") | ||
| .revision_date("2024-07-05", "2024-07-05") | ||
| .format("umm_json") | ||
| .get_all() | ||
| ) | ||
| granule_dict = {} | ||
| for granule in granules: | ||
| granule_json = json.loads(granule) | ||
| for item in granule_json["items"]: | ||
| native_id = item["meta"]["native-id"] | ||
| granule_dict[native_id] = item | ||
|
|
||
| self.assertIn("SWOT_L2_HR_RiverSP_Reach_017_312_AS_20240630T042656_20240630T042706_PIC0_01_swot", | ||
| granule_dict.keys()) | ||
| self.assertIn("SWOT_L2_HR_RiverSP_Reach_017_310_SI_20240630T023426_20240630T023433_PIC0_01_swot", | ||
| granule_dict.keys()) | ||
| self.assertIn("SWOT_L2_HR_RiverSP_Reach_017_333_EU_20240630T225156_20240630T225203_PIC0_01_swot", | ||
| granule_dict.keys()) | ||
| self.assertIn( | ||
| "SWOT_L2_HR_RiverSP_Reach_017_312_AS_20240630T042656_20240630T042706_PIC0_01_swot", | ||
| granule_dict.keys(), | ||
| ) | ||
| self.assertIn( | ||
| "SWOT_L2_HR_RiverSP_Reach_017_310_SI_20240630T023426_20240630T023433_PIC0_01_swot", | ||
| granule_dict.keys(), | ||
| ) | ||
| self.assertIn( | ||
| "SWOT_L2_HR_RiverSP_Reach_017_333_EU_20240630T225156_20240630T225203_PIC0_01_swot", | ||
| granule_dict.keys(), | ||
| ) | ||
|
|
||
| def test_temporal_invalid_strings(self): | ||
| query = GranuleQuery() | ||
|
|
@@ -129,40 +140,56 @@ def test_temporal_rounding(self): | |
| # one whole year | ||
| query.temporal("2016", "2016") | ||
| self.assertIn("temporal", query.params) | ||
| self.assertEqual(query.params["temporal"][0], "2016-01-01T00:00:00Z,2016-12-31T23:59:59Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][0], "2016-01-01T00:00:00Z,2016-12-31T23:59:59Z" | ||
| ) | ||
|
|
||
| # one whole month | ||
| query.temporal("2016-10", "2016-10") | ||
| self.assertEqual(query.params["temporal"][1], "2016-10-01T00:00:00Z,2016-10-31T23:59:59Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][1], "2016-10-01T00:00:00Z,2016-10-31T23:59:59Z" | ||
| ) | ||
|
|
||
| # one whole day, wrong way | ||
| query.temporal("2016-10-10", datetime(2016, 10, 10)) | ||
| self.assertNotEqual(query.params["temporal"][2], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z") | ||
| self.assertNotEqual( | ||
| query.params["temporal"][2], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z" | ||
| ) | ||
|
|
||
| # one whole day, right way | ||
| query.temporal("2016-10-10", datetime(2016, 10, 10).date()) | ||
| self.assertEqual(query.params["temporal"][3], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][3], "2016-10-10T00:00:00Z,2016-10-10T23:59:59Z" | ||
| ) | ||
|
|
||
| def test_temporal_tz_aware(self): | ||
| query = GranuleQuery() | ||
|
|
||
| tz = timezone(timedelta(hours=-3)) | ||
| query.temporal("2016-10-10T00:02:01-03:00", datetime(2016, 10, 10, 0, 2, 1, tzinfo=tz)) | ||
| query.temporal( | ||
| "2016-10-10T00:02:01-03:00", datetime(2016, 10, 10, 0, 2, 1, tzinfo=tz) | ||
| ) | ||
| self.assertIn("temporal", query.params) | ||
| self.assertEqual(query.params["temporal"][0], "2016-10-10T03:02:01Z,2016-10-10T03:02:01Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][0], "2016-10-10T03:02:01Z,2016-10-10T03:02:01Z" | ||
| ) | ||
|
|
||
| def test_temporal_set(self): | ||
| query = GranuleQuery() | ||
|
|
||
| # both strings | ||
| query.temporal("2016-10-10T01:02:03Z", "2016-10-12T09:08:07Z") | ||
| self.assertIn("temporal", query.params) | ||
| self.assertEqual(query.params["temporal"][0], "2016-10-10T01:02:03Z,2016-10-12T09:08:07Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][0], "2016-10-10T01:02:03Z,2016-10-12T09:08:07Z" | ||
| ) | ||
|
|
||
| # string and datetime | ||
| query.temporal("2016-10-10T01:02:03Z", datetime(2016, 10, 12, 9)) | ||
| self.assertIn("temporal", query.params) | ||
| self.assertEqual(query.params["temporal"][1], "2016-10-10T01:02:03Z,2016-10-12T09:00:00Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][1], "2016-10-10T01:02:03Z,2016-10-12T09:00:00Z" | ||
| ) | ||
|
|
||
| # string and None | ||
| query.temporal(datetime(2016, 10, 12, 10, 55, 7), None) | ||
|
|
@@ -172,12 +199,16 @@ def test_temporal_set(self): | |
| # both datetimes | ||
| query.temporal(datetime(2016, 10, 12, 10, 55, 7), datetime(2016, 10, 12, 11)) | ||
| self.assertIn("temporal", query.params) | ||
| self.assertEqual(query.params["temporal"][3], "2016-10-12T10:55:07Z,2016-10-12T11:00:00Z") | ||
| self.assertEqual( | ||
| query.params["temporal"][3], "2016-10-12T10:55:07Z,2016-10-12T11:00:00Z" | ||
| ) | ||
|
|
||
| def test_temporal_option_set(self): | ||
| query = GranuleQuery() | ||
|
|
||
| query.temporal("2016-10-10T01:02:03Z", "2016-10-12T09:08:07Z", exclude_boundary=True) | ||
| query.temporal( | ||
| "2016-10-10T01:02:03Z", "2016-10-12T09:08:07Z", exclude_boundary=True | ||
| ) | ||
| self.assertIn("exclude_boundary", query.options["temporal"]) | ||
| self.assertEqual(query.options["temporal"]["exclude_boundary"], True) | ||
|
|
||
|
|
@@ -261,24 +292,24 @@ def test_orbit_number_encode(self): | |
|
|
||
| def test_day_night_flag_day_set(self): | ||
| query = GranuleQuery() | ||
| query.day_night_flag('day') | ||
| query.day_night_flag("day") | ||
|
|
||
| self.assertIn(self.day_night_flag, query.params) | ||
| self.assertEqual(query.params[self.day_night_flag], 'day') | ||
| self.assertEqual(query.params[self.day_night_flag], "day") | ||
|
|
||
| def test_day_night_flag_night_set(self): | ||
| query = GranuleQuery() | ||
| query.day_night_flag('night') | ||
| query.day_night_flag("night") | ||
|
|
||
| self.assertIn(self.day_night_flag, query.params) | ||
| self.assertEqual(query.params[self.day_night_flag], 'night') | ||
| self.assertEqual(query.params[self.day_night_flag], "night") | ||
|
|
||
| def test_day_night_flag_unspecified_set(self): | ||
| query = GranuleQuery() | ||
| query.day_night_flag('unspecified') | ||
| query.day_night_flag("unspecified") | ||
|
|
||
| self.assertIn(self.day_night_flag, query.params) | ||
| self.assertEqual(query.params[self.day_night_flag], 'unspecified') | ||
| self.assertEqual(query.params[self.day_night_flag], "unspecified") | ||
|
|
||
| def test_day_night_flag_invalid_set(self): | ||
| query = GranuleQuery() | ||
|
|
@@ -452,36 +483,39 @@ def test_valid_spatial_state(self): | |
| self.assertTrue(query._valid_state()) | ||
|
|
||
| def _test_get(self): | ||
| """ Test real query """ | ||
| """Test real query""" | ||
|
|
||
| query = GranuleQuery() | ||
| query.short_name('MCD43A4').version('005') | ||
| query.short_name("MCD43A4").version("005") | ||
| query.temporal(datetime(2016, 1, 1), datetime(2016, 1, 1)) | ||
| results = query.get(limit=10) | ||
|
|
||
| self.assertEqual(len(results), 10) | ||
|
|
||
| def test_stac_output(self): | ||
| """ Test real query with STAC output type """ | ||
| """Test real query with STAC output type""" | ||
| # HLSL30: https://cmr.earthdata.nasa.gov/search/concepts/C2021957657-LPCLOUD | ||
| query = GranuleQuery() | ||
| search = query.parameters(point=(-105.78, 35.79), | ||
| temporal=('2021-02-01', '2021-03-01'), | ||
| collection_concept_id='C2021957657-LPCLOUD' | ||
| ) | ||
| search = query.parameters( | ||
| point=(-105.78, 35.79), | ||
| temporal=("2021-02-01", "2021-03-01"), | ||
| collection_concept_id="C2021957657-LPCLOUD", | ||
| ) | ||
| results = search.format("stac").get() | ||
| feature_collection = json.loads(results[0]) | ||
|
|
||
| self.assertEqual(len(results), 1) | ||
| self.assertEqual(feature_collection['type'], 'FeatureCollection') | ||
| self.assertEqual(feature_collection['numberMatched'], 2) | ||
| self.assertEqual(len(feature_collection['features']), 2) | ||
| self.assertEqual(feature_collection["type"], "FeatureCollection") | ||
| self.assertEqual(feature_collection["numberMatched"], 2) | ||
| self.assertEqual(len(feature_collection["features"]), 2) | ||
|
|
||
| def _test_hits(self): | ||
| """ integration test for hits() """ | ||
| """integration test for hits()""" | ||
|
|
||
| query = GranuleQuery() | ||
| query.short_name("AST_L1T").version("003").temporal("2016-10-26T01:30:00Z", "2016-10-26T01:40:00Z") | ||
| query.short_name("AST_L1T").version("003").temporal( | ||
| "2016-10-26T01:30:00Z", "2016-10-26T01:40:00Z" | ||
| ) | ||
| hits = query.hits() | ||
|
|
||
| self.assertEqual(hits, 3) | ||
|
|
@@ -514,7 +548,17 @@ def test_invalid_parameters(self): | |
|
|
||
| def test_valid_formats(self): | ||
| query = GranuleQuery() | ||
| formats = ["json", "xml", "echo10", "iso", "iso19115", "csv", "atom", "kml", "native"] | ||
| formats = [ | ||
| "json", | ||
| "xml", | ||
| "echo10", | ||
| "iso", | ||
| "iso19115", | ||
| "csv", | ||
| "atom", | ||
| "kml", | ||
| "native", | ||
| ] | ||
|
|
||
| for _format in formats: | ||
| query.format(_format) | ||
|
|
@@ -548,7 +592,9 @@ def test_valid_concept_id(self): | |
| self.assertEqual(query.params["concept_id"], ["C1299783579-LPDAAC_ECS"]) | ||
|
|
||
| query.concept_id(["C1299783579-LPDAAC_ECS", "G1441380236-PODAAC"]) | ||
| self.assertEqual(query.params["concept_id"], ["C1299783579-LPDAAC_ECS", "G1441380236-PODAAC"]) | ||
| self.assertEqual( | ||
| query.params["concept_id"], ["C1299783579-LPDAAC_ECS", "G1441380236-PODAAC"] | ||
| ) | ||
|
|
||
| def test_token(self): | ||
| query = GranuleQuery() | ||
|
|
@@ -575,3 +621,25 @@ def test_readable_granule_name(self): | |
|
|
||
| query.readable_granule_name(["*a*", "*b*"]) | ||
| self.assertEqual(query.params[self.readable_granule_name], ["*a*", "*b*"]) | ||
| # Make sure there are no spaces before 'def' | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. English, please. Also, please add a blank line above to separate it from the preceding method. |
||
| def test_wkt_coordinate_order_warning(): | ||
| """ | ||
| Ensure a warning is raised when coordinates appear to be in the wrong order (span > 180). | ||
| """ | ||
| # Use coordinates that cross the antimeridian (span > 180) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. English comments please |
||
| flipped_coords = [(170, 10), (-170, 10), (-170, -10), (170, -10), (170, 10)] | ||
| query = GranuleQuery() | ||
|
|
||
| # Verify that the UserWarning configured in queries.py is raised | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. English, please. |
||
| with pytest.warns(UserWarning, match="longitude span is greater than 180 degrees"): | ||
| query.polygon(flipped_coords) | ||
|
|
||
| def test_bounding_box_order_warning(): | ||
| """ | ||
| Verify warning for incorrect bounding box coordinate order. | ||
| """ | ||
| query = GranuleQuery() | ||
|
|
||
| # Verify the antimeridian-crossing warning defined in queries.py | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. English comments, please. |
||
| with pytest.warns(UserWarning, match="crosses the antimeridian"): | ||
| query.bounding_box(10, 0, -10, 5) | ||
|
Comment on lines
+625
to
+645
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Indentation appears to be incorrect
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. That's based on an earlier comment I made, indicating that these test functions can be top level test functions rather than within the test case class, but that was before I saw that all of the tests in this file are within the class. I'm fine with these new functions also being methods within the class, but we should probably separate such methods into top level functions (in a separate PR) because most of the test methods in this class do not require vcrpy, so placing them within the class doesn't make sense (although does no harm either).
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Ah I see your point. Agreed it can be refactored |
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.