More informative log message when granule covers (#77)

Merged 17 commits on May 5, 2021
4 changes: 2 additions & 2 deletions .github/workflows/ci.yaml
@@ -21,7 +21,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install -U pytest pytest-cov trollsift six netifaces watchdog posttroll pyyaml pyinotify
pip install -U pytest pytest-cov trollsift six netifaces watchdog posttroll pyyaml pyinotify pyresample pytroll-schedule
Review comment (Member): Good catches!

- name: Install pytroll-collectors
run: |
pip install --no-deps -e .
@@ -32,4 +32,4 @@ jobs:
uses: codecov/codecov-action@v1
with:
file: ./coverage.xml
env_vars: PYTHON_VERSION
env_vars: PYTHON_VERSION
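
The two packages added to the test environment here, pyresample and pytroll-schedule, are required by the new region-collector tests added further down in this PR. As a side note, a minimal sketch of how a test module could skip gracefully when such optional test dependencies are missing, assuming the importable names are pyresample and trollsched (trollsched being an assumed import name for pytroll-schedule); this snippet is illustrative and not part of the PR:

import pytest

# Skip this test module entirely if the new test dependencies are absent.
# "trollsched" is assumed to be the package installed by pytroll-schedule.
pyresample = pytest.importorskip("pyresample")
trollsched = pytest.importorskip("trollsched")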
39 changes: 12 additions & 27 deletions pytroll_collectors/region_collector.py
@@ -92,7 +92,7 @@ def collect(self, granule_metadata):

granule_metadata['end_time'] = end_time

LOG.debug("Adding area ID to metadata: %s", str(self.region.area_id))
LOG.debug(f"Adding area ID {self.region.area_id!s} to metadata for {platform!s}")
granule_metadata['collection_area_id'] = self.region.area_id

self.last_file_added = False
@@ -102,35 +102,17 @@ def collect(self, granule_metadata):
self.granule_times.add(ptime)
self.granules.append(granule_metadata)
self.last_file_added = True
LOG.info("Added %s (%s) granule to area %s",
LOG.info("Added expected granule %s (%s) to area %s",
platform,
str(start_time),
self.region.area_id)
# If last granule return swath and cleanup
# if self.granule_times == self.planned_granule_times:
if self.is_swath_complete():
LOG.info("Collection finished for area: %s",
str(self.region.area_id))
LOG.info(f"Collection finished for {platform!s} area {self.region.area_id!s}")
return self.finish()
else:
try:
new_timeout = (max(self.planned_granule_times -
self.granule_times) +
self.granule_duration +
self.timeliness)
except ValueError:
LOG.error("Calculation of new timeout failed, "
"keeping previous timeout.")
LOG.error("Planned: %s", self.planned_granule_times)
LOG.error("Received: %s", self.granule_times)
return

if new_timeout < self.timeout:
self.timeout = new_timeout
LOG.info("Adjusted timeout: %s",
self.timeout.isoformat())

return

return

# Get corners from input data

@@ -151,7 +133,10 @@ def collect(self, granule_metadata):

# If file is within region, make pass prediction to know what to wait
# for
if granule_pass.area_coverage(self.region) > 0:
cov = granule_pass.area_coverage(self.region)
if cov > 0:
LOG.debug(f"Granule {granule_metadata['uri']:s} is overlapping "
f"region {self.region.name:s} by fraction {cov:.5f}")
self.granule_times.add(start_time)
self.granules.append(granule_metadata)
self.last_file_added = True
@@ -160,7 +145,7 @@ def collect(self, granule_metadata):

if not self.planned_granule_times:
self.planned_granule_times.add(start_time)
LOG.info("Added %s (%s) granule to area %s",
LOG.info("Added new overlapping granule %s (%s) to area %s",
platform,
str(start_time),
self.region.area_id)
@@ -186,8 +171,8 @@ def collect(self, granule_metadata):
break
self.planned_granule_times.add(gr_time)

LOG.info("Planned granules for %s: %s", self.region.name,
str(sorted(self.planned_granule_times)))
LOG.info(f"Planned granules for {platform!s} over "
f"{self.region.name:s}: {sorted(self.planned_granule_times)!s}")
self.timeout = (max(self.planned_granule_times) +
self.granule_duration +
self.timeliness)
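The inline completeness check and the timeout-shrinking block removed above are folded into a single call to self.is_swath_complete(), whose body is not shown in this diff. A minimal sketch of what such a helper could look like, assuming it compares received against planned granule times (as the removed inline comment suggests) and takes over the removed timeout adjustment; the merged implementation may differ:

def is_swath_complete(self):
    """Check whether all planned granules have arrived (hypothetical sketch)."""
    if self.granule_times == self.planned_granule_times:
        return True
    # Assumption: the timeout adjustment removed from collect() now lives here,
    # shrinking the timeout to cover only the latest still-missing granule
    # plus the granule duration and the allowed timeliness.
    try:
        new_timeout = (max(self.planned_granule_times - self.granule_times) +
                       self.granule_duration + self.timeliness)
    except ValueError:
        LOG.error("Calculation of new timeout failed, keeping previous timeout.")
        return False
    if new_timeout < self.timeout:
        self.timeout = new_timeout
        LOG.info("Adjusted timeout: %s", self.timeout.isoformat())
    return False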
139 changes: 139 additions & 0 deletions pytroll_collectors/tests/test_region_collector.py
@@ -0,0 +1,139 @@
"""Test region collector functionality."""

import logging
import pytest
import datetime
import unittest.mock
import io

yaml_europe = """
euro_ma:
  description: euro_ma
  projection:
    proj: stere
    lat_0: 45
    lon_0: 15
    k: 1
    x_0: 0
    y_0: 0
    ellps: WGS84
    no_defs: null
  shape:
    height: 1069
    width: 1538
  area_extent:
    lower_left_xy:
    - -3845890.2472199923
    - -2150868.4484187816
    upper_right_xy:
    - 3845890.2472199923
    - 3198354.325865823
    units: m
"""

tles = b"""
METOP-C
1 43689U 18087A   21101.60865186  .00000002  00000-0  20894-4 0  9998
2 43689  98.6928 163.0161 0002296 181.8672 178.2497 14.21491657125954
"""


@pytest.fixture
def europe():
"""Return european AreaDefinition."""
from pyresample.area_config import load_area_from_string
return load_area_from_string(yaml_europe)


@pytest.fixture
def europe_collector(europe):
"""Construct RegionCollector for Central Europe."""
from pytroll_collectors.region_collector import RegionCollector
return RegionCollector(europe)


def _fakeopen(url):
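    """Return the canned TLE data; used to patch pyorbital.tlefile.urlopen so the tests stay offline."""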
    return io.BytesIO(tles)


def test_init(europe):
"""Test that initialisation appears to work."""
from pytroll_collectors.region_collector import RegionCollector
RegionCollector(europe)


@unittest.mock.patch("pyorbital.tlefile.urlopen", new=_fakeopen)
def test_collect(europe_collector, caplog):
"""Test that granules can be collected."""
granule_metadata = {
"platform_name": "Metop-C",
"sensor": "avhrr"}

with caplog.at_level(logging.DEBUG):
for s_min in (0, 3, 6, 9, 12, 15, 18):
europe_collector.collect(
{**granule_metadata,
**{"start_time": datetime.datetime(2021, 4, 11, 10, s_min, 0),
"end_time": datetime.datetime(2021, 4, 11, 10, s_min+3, 0),
"uri": f"file://{s_min:d}"}})

assert "Granule file://0 is overlapping region euro_ma by fraction 0.03685" in caplog.text
assert "Added new overlapping granule Metop-C (2021-04-11 10:00:00) to area euro_ma" in caplog.text
assert "Collection finished for Metop-C area euro_ma" in caplog.text
for n in (3, 6, 9, 12, 15):
assert f"Added expected granule Metop-C (2021-04-11 10:{n:>02d}:00) to area euro_ma" in caplog.text
assert "Granule file://18 is not overlapping euro_ma"


@unittest.mock.patch("pyorbital.tlefile.urlopen", new=_fakeopen)
def test_collect_duration(europe):
"""Test with tle_platform_name, without end_time, using call syntax."""
from pytroll_collectors.region_collector import RegionCollector
alt_europe_collector = RegionCollector(
europe,
timeliness=datetime.timedelta(seconds=300),
granule_duration=datetime.timedelta(seconds=120))
granule_metadata = {
"sensor": ["avhrr"],
"tle_platform_name": "Metop-C",
"start_time": datetime.datetime(2021, 4, 11, 0, 0)}
alt_europe_collector(granule_metadata)


@unittest.mock.patch("pyorbital.tlefile.urlopen", new=_fakeopen)
def test_adjust_timeout(europe, caplog):
"""Test timeout adjustment."""
from pytroll_collectors.region_collector import RegionCollector
granule_metadata = {
"sensor": "avhrr",
"tle_platform_name": "Metop-C",
"uri": "file://alt/0"}
alt_europe_collector = RegionCollector(
europe,
granule_duration=datetime.timedelta(seconds=180))

with caplog.at_level(logging.DEBUG):
alt_europe_collector.collect(
{**granule_metadata,
"start_time": datetime.datetime(2021, 4, 11, 10, 0)})
alt_europe_collector.collect(
{**granule_metadata,
"start_time": datetime.datetime(2021, 4, 11, 10, 15)})
alt_europe_collector.collect(
{**granule_metadata,
"start_time": datetime.datetime(2021, 4, 11, 10, 12)})
assert "Adjusted timeout" in caplog.text


@pytest.mark.skip(reason="test never finishes")
@unittest.mock.patch("pyorbital.tlefile.urlopen", new=_fakeopen)
def test_faulty_end_time(europe_collector, caplog):
"""Test adapting if end_time before start_time."""
granule_metadata = {
"platform_name": "Metop-C",
"sensor": "avhrr",
"start_time": datetime.datetime(2021, 4, 11, 0, 0),
"end_time": datetime.datetime(2021, 4, 10, 23, 58)}
with caplog.at_level(logging.DEBUG):
europe_collector(granule_metadata)
assert "Adjusted end time" in caplog.text