diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2d1f1b5..76151d1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -10,10 +10,10 @@ jobs: fail-fast: true matrix: os: ["ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10"] + python-version: ["3.9", "3.10", "3.11"] experimental: [false] include: - - python-version: "3.10" + - python-version: "3.11" os: "ubuntu-latest" experimental: true @@ -25,18 +25,34 @@ jobs: steps: - name: Checkout source - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v2 with: - miniconda-version: "latest" + miniforge-variant: Mambaforge + miniforge-version: latest + use-mamba: true python-version: ${{ matrix.python-version }} - mamba-version: "*" - channels: conda-forge,defaults - environment-file: continuous_integration/environment.yaml activate-environment: test-environment + - name: Set cache environment variables + shell: bash -l {0} + run: | + echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)") + echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV + + - uses: actions/cache@v3 + with: + path: ${{ env.CONDA_PREFIX }} + key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }} + id: cache + + - name: Update environment + run: mamba env update -n test-environment -f continuous_integration/environment.yaml + if: steps.cache.outputs.cache-hit != 'true' + - name: Install unstable dependencies if: matrix.experimental == true shell: bash -l {0} diff --git a/activefires_pp/post_processing.py b/activefires_pp/post_processing.py index eadb315..bd9950f 100644 --- a/activefires_pp/post_processing.py +++ b/activefires_pp/post_processing.py @@ -69,6 +69,8 @@ # COL_NAMES = ["latitude", "longitude", "tb", "along_scan_res", "along_track_res", "conf", 
"power"] +NO_FIRES_TEXT = 'No fire detections for this granule' + logger = logging.getLogger(__name__) logging.getLogger("fiona").setLevel(logging.WARNING) @@ -251,8 +253,8 @@ def store(output_filename, detections): return None -def store_geojson(output_filename, detections, platform_name=None): - """Store the filtered AF detections in Geojson format on disk.""" +def geojson_feature_collection_from_detections(detections, platform_name=None): + """Create the Geojson feature collection from fire detection data.""" if len(detections) == 0: logger.debug("No detections to save!") return None @@ -280,7 +282,11 @@ def store_geojson(output_filename, detections, platform_name=None): properties=prop) features.append(feat) - feature_collection = FeatureCollection(features) + return FeatureCollection(features) + + +def store_geojson(output_filename, feature_collection): + """Store the Geojson feature collection of fire detections on disk.""" path = os.path.dirname(output_filename) if not os.path.exists(path): logger.info("Create directory: %s", path) @@ -289,8 +295,6 @@ def store_geojson(output_filename, detections, platform_name=None): with open(output_filename, 'w') as fpt: dump(feature_collection, fpt) - return output_filename - def get_mask_from_multipolygon(points, geometry, start_idx=1): """Get mask for points from a shapely Multipolygon.""" @@ -346,7 +350,6 @@ def __init__(self, configfile, shp_borders, shp_mask, regional_filtermask=None): self._set_options_from_config(config) self.host = socket.gethostname() - self.timezone = self.options.get('timezone', 'GMT') self.input_topic = self.options['subscribe_topics'][0] @@ -355,10 +358,14 @@ def __init__(self, configfile, shp_borders, shp_mask, regional_filtermask=None): self.outfile_pattern_national = self.options.get('geojson_file_pattern_national') self.outfile_pattern_regional = self.options.get('geojson_file_pattern_regional') self.output_dir = self.options.get('output_dir', '/tmp') + self.filepath_detection_id_cache = 
self.options.get('filepath_detection_id_cache') frmt = self.options['regional_shapefiles_format'] self.regional_shapefiles_globstr = globify(frmt) + self._fire_detection_id = None + self._initialize_fire_detection_id() + self.listener = None self.publisher = None self.loop = False @@ -402,6 +409,88 @@ def signal_shutdown(self, *args, **kwargs): """Shutdown the Active Fires postprocessing.""" self.close() + def check_incoming_message_and_get_filename(self, msg): + """Check the message content and return filename if okay.""" + if msg.type not in ['file', 'collection', 'dataset']: + logger.debug("Message type not supported: %s", str(msg.type)) + return None + + filename = get_filename_from_uri(msg.data.get('uri')) + if not os.path.exists(filename): + logger.warning("File does not exist: %s", filename) + return None + + file_ok = check_file_type_okay(msg.data.get('type')) + if not file_ok: + output_messages = self._generate_no_fires_messages(msg, NO_FIRES_TEXT) + for output_msg in output_messages: + logger.debug("Sending message: %s", str(output_msg)) + self.publisher.send(str(output_msg)) + return None + + return filename + + def do_postprocessing_on_message(self, msg, filename): + """Do the fires post processing on a message.""" + platform_name = msg.data.get('platform_name') + af_shapeff = ActiveFiresShapefileFiltering(filename, platform_name=platform_name, + timezone=self.timezone) + afdata = af_shapeff.get_af_data(self.infile_pattern) + if len(afdata) == 0: + output_messages = self._generate_no_fires_messages(msg, NO_FIRES_TEXT) + for output_msg in output_messages: + logger.debug("Sending message: %s", str(output_msg)) + self.publisher.send(str(output_msg)) + return + + afdata = self.fires_filtering(msg, af_shapeff) + logger.debug("After fires_filtering...: Number of fire detections = %d", len(afdata)) + if len(afdata) == 0: + logger.debug("No fires - so no regional filtering to be done!") + return + + # It is here that we should add a unique day-ID to each of the 
detections! + afdata = self.add_unique_day_id(afdata) + self.save_id_to_file() + + # 1) Create geojson feature collection + # 2) Dump geojson data to disk + feature_collection = geojson_feature_collection_from_detections(afdata, + platform_name=af_shapeff.platform_name) + + fmda = af_shapeff.metadata + pout = Parser(self.outfile_pattern_national) + out_filepath = os.path.join(self.output_dir, pout.compose(fmda)) + logger.debug("Output file path = %s", out_filepath) + + if feature_collection is None: + logger.info("No geojson file created, number of fires after filtering = %d", len(afdata)) + output_messages = self._generate_no_fires_messages(msg, + 'No true fire detections inside National borders') # noqa + else: + store_geojson(out_filepath, feature_collection) + output_messages = self.get_output_messages(out_filepath, msg, len(afdata)) + + for output_msg in output_messages: + if output_msg: + logger.debug("Sending message: %s", str(output_msg)) + self.publisher.send(str(output_msg)) + + # Do the regional filtering now: + if not self.regional_filtermask: + logger.info("No regional filtering is attempted.") + return + + # FIXME! If afdata is empty (len=0) then it seems all data are inside all regions! 
+ af_shapeff = ActiveFiresShapefileFiltering(afdata=afdata, platform_name=platform_name, + timezone=self.timezone) + regional_fmask = af_shapeff.get_regional_filtermasks(self.regional_filtermask, + globstr=self.regional_shapefiles_globstr) + regional_messages = self.regional_fires_filtering_and_publishing(msg, regional_fmask, af_shapeff) + for region_msg in regional_messages: + logger.debug("Sending message: %s", str(region_msg)) + self.publisher.send(str(region_msg)) + def run(self): """Run the AF post processing.""" while self.loop: @@ -411,60 +500,11 @@ def run(self): except Empty: continue else: - if msg.type not in ['file', 'collection', 'dataset']: - logger.debug("Message type not supported: %s", str(msg.type)) - continue - - platform_name = msg.data.get('platform_name') - filename = get_filename_from_uri(msg.data.get('uri')) - if not os.path.exists(filename): - logger.warning("File does not exist!") + filename = self.check_incoming_message_and_get_filename(msg) + if not filename: continue - file_ok = check_file_type_okay(msg.data.get('type')) - no_fires_text = 'No fire detections for this granule' - output_messages = self._generate_no_fires_messages(msg, no_fires_text) - if not file_ok: - for output_msg in output_messages: - logger.debug("Sending message: %s", str(output_msg)) - self.publisher.send(str(output_msg)) - continue - - af_shapeff = ActiveFiresShapefileFiltering(filename, platform_name=platform_name, - timezone=self.timezone) - afdata = af_shapeff.get_af_data(self.infile_pattern) - - if len(afdata) == 0: - logger.debug("Sending message: %s", str(output_msg)) - self.publisher.send(str(output_msg)) - continue - - output_messages, afdata = self.fires_filtering(msg, af_shapeff) - logger.debug("After fires_filtering...: Number of messages = %d", len(output_messages)) - - for output_msg in output_messages: - if output_msg: - logger.debug("Sending message: %s", str(output_msg)) - self.publisher.send(str(output_msg)) - - # Do the regional filtering now: - 
if not self.regional_filtermask: - logger.info("No regional filtering is attempted.") - continue - - if len(afdata) == 0: - logger.debug("No fires - so no regional filtering to be done!") - continue - - # FIXME! If afdata is empty (len=0) then it seems all data are inside all regions! - af_shapeff = ActiveFiresShapefileFiltering(afdata=afdata, platform_name=platform_name, - timezone=self.timezone) - regional_fmask = af_shapeff.get_regional_filtermasks(self.regional_filtermask, - globstr=self.regional_shapefiles_globstr) - regional_messages = self.regional_fires_filtering_and_publishing(msg, regional_fmask, af_shapeff) - for region_msg in regional_messages: - logger.debug("Sending message: %s", str(region_msg)) - self.publisher.send(str(region_msg)) + self.do_postprocessing_on_message(msg, filename) def regional_fires_filtering_and_publishing(self, msg, regional_fmask, afsff_obj): """From the regional-fires-filter-mask and the fire detection data send regional messages.""" @@ -489,13 +529,18 @@ def regional_fires_filtering_and_publishing(self, msg, regional_fmask, afsff_obj out_filepath = os.path.join(self.output_dir, pout.compose(fmda)) logger.debug("Output file path = %s", out_filepath) data_in_region = afdata[regional_fmask[region_name]['mask']] - filepath = store_geojson(out_filepath, data_in_region, platform_name=fmda['platform']) - if not filepath: + + # filepath = store_geojson(out_filepath, data_in_region, platform_name=fmda['platform']) + feature_collection = geojson_feature_collection_from_detections(data_in_region, + platform_name=fmda['platform']) + if feature_collection is None: logger.warning("Something wrong happended storing regional " + "data to Geojson - area: {name}".format(name=str(region_name))) continue - outmsg = self._generate_output_message(filepath, msg, regional_fmask[region_name]) + store_geojson(out_filepath, feature_collection) + + outmsg = self._generate_output_message(out_filepath, msg, regional_fmask[region_name]) 
output_messages.append(outmsg) logger.info("Geojson file created! Number of fires in region = %d", len(data_in_region)) @@ -531,20 +576,12 @@ def fires_filtering(self, msg, af_shapeff): afdata_ff = af_shapeff.get_af_data() logger.debug("After fires_filtering: Number of fire detections left: %d", len(afdata_ff)) - filepath = store_geojson(out_filepath, afdata_ff, platform_name=af_shapeff.platform_name) - out_messages = self.get_output_messages(filepath, msg, len(afdata_ff)) - - return out_messages, afdata_ff + return afdata_ff def get_output_messages(self, filepath, msg, number_of_data): """Generate the adequate output message(s) depending on if an output file was created or not.""" - if filepath: - logger.info("geojson file created! Number of fires after filtering = %d", number_of_data) - return [self._generate_output_message(filepath, msg)] - else: - logger.info("No geojson file created, number of fires after filtering = %d", number_of_data) - return self._generate_no_fires_messages(msg, - 'No true fire detections inside National borders') + logger.info("Geojson file created! Number of fires = %d", number_of_data) + return [self._generate_output_message(filepath, msg)] def _generate_output_message(self, filepath, input_msg, region=None): """Create the output message to publish.""" @@ -574,6 +611,60 @@ def _check_borders_shapes_exists(self): if not os.path.exists(self.shp_borders): raise OSError("Shape file does not exist! 
Filename = %s" % self.shp_borders) + def _initialize_fire_detection_id(self): + """Initialize the fire detection ID.""" + if self.filepath_detection_id_cache and os.path.exists(self.filepath_detection_id_cache): + self._fire_detection_id = self.get_id_from_file() + else: + self._fire_detection_id = {'date': datetime.utcnow(), 'counter': 0} + + def update_fire_detection_id(self): + """Update the fire detection ID registry.""" + now = datetime.utcnow() + tdelta = now - self._fire_detection_id['date'] + if tdelta.total_seconds() > 24*3600: + self._initialize_fire_detection_id() + elif tdelta.total_seconds() > 0 and self._fire_detection_id['date'].day != now.day: + self._initialize_fire_detection_id() + + self._fire_detection_id['counter'] = self._fire_detection_id['counter'] + 1 + + def save_id_to_file(self): + """Save the (current) detection id on disk.""" + with open(self.filepath_detection_id_cache, 'w') as fpt: + id_ = self._create_id_string() + fpt.write(id_) + + def get_id_from_file(self): + """Read the latest stored detection id string from disk and convert to internal format.""" + with open(self.filepath_detection_id_cache, 'r') as fpt: + idstr = fpt.read() + + return self._get_id_from_string(idstr) + + def _get_id_from_string(self, idstr): + """Get the detection id from string.""" + datestr, counter = idstr.split('-') + return {'date': datetime.strptime(datestr, '%Y%m%d'), + 'counter': int(counter)} + + def _create_id_string(self): + """From the internal fire detection id create the id string to be exposed to the user.""" + return (self._fire_detection_id['date'].strftime('%Y%m%d') + + '-' + str(self._fire_detection_id['counter'])) + + def add_unique_day_id(self, afdata): + """Add a unique detection id - date + a running number for the day.""" + # Add id's to the detections: + id_list = [] + for _i in range(len(afdata)): + self.update_fire_detection_id() + id_ = self._create_id_string() + id_list.append(id_) + + afdata['detection_id'] = id_list + return 
afdata + def close(self): """Shutdown the Active Fires postprocessing.""" logger.info('Terminating Active Fires post processing.') diff --git a/activefires_pp/tests/test_fire_detection_id_handling.py b/activefires_pp/tests/test_fire_detection_id_handling.py new file mode 100644 index 0000000..39b5951 --- /dev/null +++ b/activefires_pp/tests/test_fire_detection_id_handling.py @@ -0,0 +1,323 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2023 Adam.Dybbroe + +# Author(s): + +# Adam.Dybbroe + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +"""Test operations on the fire detection id.""" + +from unittest.mock import patch +from unittest import TestCase +import pandas as pd + +import io +from datetime import datetime +from freezegun import freeze_time + +from activefires_pp.post_processing import ActiveFiresShapefileFiltering +from activefires_pp.post_processing import ActiveFiresPostprocessing +from activefires_pp.post_processing import COL_NAMES + + +TEST_ACTIVE_FIRES_FILEPATH2 = "./AFIMG_npp_d20230616_t1110054_e1111296_b60284_c20230616112418557033_cspp_dev.txt" +TEST_ACTIVE_FIRES_FILEPATH3 = "./AFIMG_j01_d20230617_t1140564_e1142209_b28903_c20230617115513873196_cspp_dev.txt" +TEST_ACTIVE_FIRES_FILEPATH4 = "./AFIMG_j01_d20230618_t0942269_e0943514_b28916_c20230618095604331171_cspp_dev.txt" + + +# Here we have sorted out all detections not passing the filter mask! 
+# So, 4 fire detections are left corresponding to what would end up in the geojson files: +TEST_ACTIVE_FIRES_FILE_DATA2 = """ +# Active Fires I-band EDR +# +# source: AFIMG_npp_d20230616_t1110054_e1111296_b60284_c20230616112418557033_cspp_dev.nc +# version: CSPP Active Fires version: cspp-active-fire-noaa_1.1.0 +# +# column 1: latitude of fire pixel (degrees) +# column 2: longitude of fire pixel (degrees) +# column 3: I04 brightness temperature of fire pixel (K) +# column 4: Along-scan fire pixel resolution (km) +# column 5: Along-track fire pixel resolution (km) +# column 6: detection confidence ([7,8,9]->[lo,med,hi]) +# column 7: fire radiative power (MW) +# +# number of fire pixels: 14 +# + 62.65801239, 17.25905228, 339.66326904, 0.375, 0.375, 8, 2.51202917 + 64.21694183, 17.42074966, 329.65161133, 0.375, 0.375, 8, 3.39806151 + 64.56904602, 16.60095215, 346.52050781, 0.375, 0.375, 8, 20.59289360 + 64.57222748, 16.59840012, 348.72860718, 0.375, 0.375, 8, 20.59289360 +""" + +# Here we have sorted out all detections not passing the filter mask! +# So, 1 fire detection is left corresponding to what would end up in the geojson files: +TEST_ACTIVE_FIRES_FILE_DATA3 = """ +# Active Fires I-band EDR +# +# source: AFIMG_j01_d20230617_t1140564_e1142209_b28903_c20230617115513873196_cspp_dev.nc +# version: CSPP Active Fires version: cspp-active-fire-noaa_1.1.0 +# +# column 1: latitude of fire pixel (degrees) +# column 2: longitude of fire pixel (degrees) +# column 3: I04 brightness temperature of fire pixel (K) +# column 4: Along-scan fire pixel resolution (km) +# column 5: Along-track fire pixel resolution (km) +# column 6: detection confidence ([7,8,9]->[lo,med,hi]) +# column 7: fire radiative power (MW) +# +# number of fire pixels: 9 +# + 64.46707153, 17.65028381, 330.15390015, 0.375, 0.375, 8, 3.75669074 +""" + + +# Here we have sorted out all detections not passing the filter mask! 
+# So, 2 fire detections are left corresponding to what would end up in the geojson files: +TEST_ACTIVE_FIRES_FILE_DATA4 = """ +# Active Fires I-band EDR +# +# source: AFIMG_j01_d20230618_t0942269_e0943514_b28916_c20230618095604331171_cspp_dev.nc +# version: CSPP Active Fires version: cspp-active-fire-noaa_1.1.0 +# +# column 1: latitude of fire pixel (degrees) +# column 2: longitude of fire pixel (degrees) +# column 3: I04 brightness temperature of fire pixel (K) +# column 4: Along-scan fire pixel resolution (km) +# column 5: Along-track fire pixel resolution (km) +# column 6: detection confidence ([7,8,9]->[lo,med,hi]) +# column 7: fire radiative power (MW) +# +# number of fire pixels: 10 +# + 65.55922699, 17.62709618, 335.81488037, 0.375, 0.375, 8, 4.66374302 + 67.27209473, 20.14731216, 348.89843750, 0.375, 0.375, 8, 11.79477501 +""" + +CONFIG_EXAMPLE = {'publish_topic': '/VIIRS/L2/Fires/PP', + 'subscribe_topics': 'VIIRS/L2/AFI', + 'af_pattern_ibands': + 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_hour:%H%M%S%f}' + + '_b{orbit:s}_c{processing_time:%Y%m%d%H%M%S%f}_cspp_dev.txt', + 'geojson_file_pattern_national': 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S}.geojson', + 'geojson_file_pattern_regional': 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S}_' + + '{region_name:s}.geojson', + 'regional_shapefiles_format': 'omr_{region_code:s}_Buffer.{ext:s}', + 'output_dir': '/path/where/the/filtered/results/will/be/stored', + 'filepath_detection_id_cache': '/path/to/the/detection_id/cache', + 'timezone': 'Europe/Stockholm'} + +MY_FILE_PATTERN = ("AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_hour:%H%M%S%f}_" + + "b{orbit:s}_c{processing_time:%Y%m%d%H%M%S%f}_cspp_dev.txt") + + +@freeze_time('2023-06-16 11:24:00') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +@patch('activefires_pp.post_processing._read_data') 
+def test_add_unique_day_id_to_detections_sameday(readdata, setup_comm, get_config, gethostname): + """Test adding unique id's to the fire detection data.""" + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + + myfilepath = TEST_ACTIVE_FIRES_FILEPATH2 + + fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA2) + afdata = pd.read_csv(fstream, index_col=None, header=None, comment='#', names=COL_NAMES) + readdata.return_value = afdata + + this = ActiveFiresShapefileFiltering(filepath=myfilepath, timezone='GMT') + with patch('os.path.exists') as mypatch: + mypatch.return_value = True + afdata = this.get_af_data(filepattern=MY_FILE_PATTERN, localtime=False) + + TestCase().assertDictEqual(afpp._fire_detection_id, {'date': datetime.utcnow(), + 'counter': 0}) + + # 4 fire detections, so (current) ID should be raised by 4 + afdata = afpp.add_unique_day_id(afdata) + assert 'detection_id' in afdata + assert afdata['detection_id'].values.tolist() == ['20230616-1', '20230616-2', + '20230616-3', '20230616-4'] + TestCase().assertDictEqual(afpp._fire_detection_id, {'date': datetime.utcnow(), + 'counter': 4}) + + +@freeze_time('2023-06-17 11:55:00') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +@patch('activefires_pp.post_processing._read_data') +def test_add_unique_day_id_to_detections_24hours_plus(readdata, setup_comm, + get_config, gethostname): + """Test adding unique id's to the fire detection data.""" + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + + myconfigfile = "/my/config/file/path" + myborders_file = 
"/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp._fire_detection_id = {'date': datetime(2023, 6, 16, 11, 24, 0), 'counter': 4} + + myfilepath = TEST_ACTIVE_FIRES_FILEPATH3 + + fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA3) + afdata = pd.read_csv(fstream, index_col=None, header=None, comment='#', names=COL_NAMES) + readdata.return_value = afdata + + this = ActiveFiresShapefileFiltering(filepath=myfilepath, timezone='GMT') + with patch('os.path.exists') as mypatch: + mypatch.return_value = True + afdata = this.get_af_data(filepattern=MY_FILE_PATTERN, localtime=False) + + TestCase().assertDictEqual(afpp._fire_detection_id, {'date': datetime(2023, 6, 16, 11, 24, 0), + 'counter': 4}) + # 1 new fire detection, so (current) ID should be raised - a new day, so id + # starting over from 0, and a new date! + afdata = afpp.add_unique_day_id(afdata) + assert 'detection_id' in afdata + assert afdata['detection_id'].values.tolist() == ['20230617-1'] + TestCase().assertDictEqual(afpp._fire_detection_id, {'date': datetime(2023, 6, 17, 11, 55, 0), + 'counter': 1}) + + +@freeze_time('2023-06-18 09:56:00') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +@patch('activefires_pp.post_processing._read_data') +def test_add_unique_day_id_to_detections_newday(readdata, setup_comm, get_config, gethostname): + """Test adding unique id's to the fire detection data.""" + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp._fire_detection_id = 
{'date': datetime(2023, 6, 17, 11, 55, 0), 'counter': 1} + + myfilepath = TEST_ACTIVE_FIRES_FILEPATH4 + + fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA4) + afdata = pd.read_csv(fstream, index_col=None, header=None, comment='#', names=COL_NAMES) + readdata.return_value = afdata + + this = ActiveFiresShapefileFiltering(filepath=myfilepath, timezone='GMT') + with patch('os.path.exists') as mypatch: + mypatch.return_value = True + afdata = this.get_af_data(filepattern=MY_FILE_PATTERN, localtime=False) + + TestCase().assertDictEqual(afpp._fire_detection_id, {'date': datetime(2023, 6, 17, 11, 55, 0), + 'counter': 1}) + # 2 new fire detections, so (current) ID should be raised - a new day, so id + # starting over from 0, and a new date! + afdata = afpp.add_unique_day_id(afdata) + assert 'detection_id' in afdata + assert afdata['detection_id'].values.tolist() == ['20230618-1', '20230618-2'] + TestCase().assertDictEqual(afpp._fire_detection_id, {'date': datetime(2023, 6, 18, 9, 56, 0), + 'counter': 2}) + + +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +@patch('activefires_pp.post_processing._read_data') +def test_store_fire_detection_id_on_disk(readdata, setup_comm, + get_config, gethostname, tmp_path): + """Test store the latest/current detection id to a file.""" + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp._fire_detection_id = {'date': datetime(2023, 6, 17, 11, 55, 0), 'counter': 1} + + detection_id_cache = tmp_path / 'detection_id_cache.txt' + afpp.filepath_detection_id_cache = str(detection_id_cache) + afpp.save_id_to_file() + + with 
open(afpp.filepath_detection_id_cache) as fpt: + result = fpt.read() + + assert result == '20230617-1' + + +@freeze_time('2023-06-18 12:00:00') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +@patch('activefires_pp.post_processing._read_data') +def test_initialize_fire_detection_id_nofile(readdata, setup_comm, + get_config, gethostname, tmp_path): + """Test initialize the fire detection id with no cache on disk.""" + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + + assert afpp.filepath_detection_id_cache == '/path/to/the/detection_id/cache' + + expected = {'date': datetime(2023, 6, 18, 12, 0, 0), 'counter': 0} + + afpp._initialize_fire_detection_id() + TestCase().assertDictEqual(afpp._fire_detection_id, expected) + + +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +@patch('activefires_pp.post_processing._read_data') +def test_get_fire_detection_id_from_file(readdata, setup_comm, + get_config, gethostname, tmp_path): + """Test retrieve the detection id from file.""" + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp._fire_detection_id = {'date': datetime(2023, 6, 17, 11, 55, 0), 'counter': 1} + + detection_id_cache = tmp_path / 
'detection_id_cache.txt' + afpp.filepath_detection_id_cache = str(detection_id_cache) + afpp.save_id_to_file() + result = afpp.get_id_from_file() + expected = {'date': datetime(2023, 6, 17), 'counter': 1} + TestCase().assertDictEqual(result, expected) + + afpp._initialize_fire_detection_id() + TestCase().assertDictEqual(afpp._fire_detection_id, expected) diff --git a/activefires_pp/tests/test_fire_notifications.py b/activefires_pp/tests/test_fire_notifications.py index 26aa0d0..f8f32f0 100644 --- a/activefires_pp/tests/test_fire_notifications.py +++ b/activefires_pp/tests/test_fire_notifications.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021, 2022 Adam Dybbroe +# Copyright (c) 2021 - 2023 Adam Dybbroe # Author(s): @@ -20,14 +20,13 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Unit testing the fire notifications. -""" +"""Unit testing the fire notifications.""" import unittest from unittest.mock import patch import yaml import io -from posttroll.message import Message +# from posttroll.message import Message from activefires_pp.fire_notifications import EndUserNotifier from activefires_pp.fire_notifications import EndUserNotifierRegional @@ -99,29 +98,32 @@ text: 'Stop being bothered: Send a note to unsubscribe@mydomain.xx' """ -REGIONAL_TEST_MESSAGE = """pytroll://VIIRS/L2/Fires/PP/Regional/0114 file safusr.u@lxserv1043.smhi.se 2021-04-19T11:16:49.542021 v1.01 application/json {"start_time": "2021-04-16T12:29:53", "end_time": "2021-04-16T12:31:18", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17666, "region_name": "Storstockholms brandf\u00f6rsvar", "region_code": "0114", "uri": "ssh://lxserv1043.smhi.se//san1/polar_out/direct_readout/viirs_active_fires/filtered/AFIMG_NOAA-20_d20210416_t122953_0114.geojson", "uid": "AFIMG_NOAA-20_d20210416_t122953_0114.geojson", "type": 
"GEOJSON-filtered", "format": "geojson", "product": "afimg"}""" +REGIONAL_TEST_MESSAGE = """pytroll://VIIRS/L2/Fires/PP/Regional/0114 file safusr.u@lxserv1043.smhi.se 2021-04-19T11:16:49.542021 v1.01 application/json {"start_time": "2021-04-16T12:29:53", "end_time": "2021-04-16T12:31:18", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17666, "region_name": "Storstockholms brandf\u00f6rsvar", "region_code": "0114", "uri": "ssh://lxserv1043.smhi.se//san1/polar_out/direct_readout/viirs_active_fires/filtered/AFIMG_NOAA-20_d20210416_t122953_0114.geojson", "uid": "AFIMG_NOAA-20_d20210416_t122953_0114.geojson", "type": "GEOJSON-filtered", "format": "geojson", "product": "afimg"}""" # noqa -NATIONAL_TEST_MESSAGE = """pytroll://VIIRS/L2/Fires/PP/National file safusr.u@lxserv1043.smhi.se 2021-04-19T11:16:49.519087 v1.01 application/json {"start_time": "2021-04-16T12:29:53", "end_time": "2021-04-16T12:31:18", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17666, "uri": "ssh://lxserv1043.smhi.se//san1/polar_out/direct_readout/viirs_active_fires/filtered/AFIMG_j01_d20210416_t122953.geojson", "uid": "AFIMG_j01_d20210416_t122953.geojson", "type": "GEOJSON-filtered", "format": "geojson", "product": "afimg"}""" +NATIONAL_TEST_MESSAGE = """pytroll://VIIRS/L2/Fires/PP/National file safusr.u@lxserv1043.smhi.se 2021-04-19T11:16:49.519087 v1.01 application/json {"start_time": "2021-04-16T12:29:53", "end_time": "2021-04-16T12:31:18", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17666, "uri": "ssh://lxserv1043.smhi.se//san1/polar_out/direct_readout/viirs_active_fires/filtered/AFIMG_j01_d20210416_t122953.geojson", "uid": "AFIMG_j01_d20210416_t122953.geojson", "type": "GEOJSON-filtered", "format": "geojson", "product": "afimg"}""" # 
noqa class MyNetrcMock(object): + """Mocking the handling of secrets via the .netrc file.""" def __init__(self): - + """Initialize the netrc mock class.""" self.hosts = {'default': ('my_user', None, 'my_passwd')} def authenticators(self, host): + """Return authentication for host.""" return self.hosts.get(host) class TestNotifyEndUsers(unittest.TestCase): + """Test notifications on National fires.""" @patch('activefires_pp.fire_notifications.netrc') @patch('activefires_pp.fire_notifications.socket.gethostname') @patch('activefires_pp.fire_notifications.read_config') @patch('activefires_pp.fire_notifications.EndUserNotifier._setup_and_start_communication') def test_get_options_national_filtering(self, setup_comm, read_config, gethostname, netrc): - + """Test get the config options for National fires filtering.""" secrets = MyNetrcMock() netrc.return_value = secrets gethostname.return_value = 'default' @@ -166,13 +168,14 @@ def test_get_options_national_filtering(self, setup_comm, read_config, gethostna class TestNotifyEndUsersRegional(unittest.TestCase): + """Test the regional notifications.""" @patch('activefires_pp.fire_notifications.netrc') @patch('activefires_pp.fire_notifications.socket.gethostname') @patch('activefires_pp.fire_notifications.read_config') @patch('activefires_pp.fire_notifications.EndUserNotifierRegional._setup_and_start_communication') def test_get_options_regional_filtering(self, setup_comm, read_config, gethostname, netrc): - + """Test get the config options for regional filtering.""" secrets = MyNetrcMock() netrc.return_value = secrets gethostname.return_value = 'default' @@ -228,7 +231,7 @@ def test_get_options_regional_filtering(self, setup_comm, read_config, gethostna # assert this.input_topic == 'VIIRS/L2/Fires/PP/Regional' # assert this.output_topic == 'VIIRS/L2/MSB/Regional' - input_msg = Message.decode(rawstr=REGIONAL_TEST_MESSAGE) + # input_msg = Message.decode(rawstr=REGIONAL_TEST_MESSAGE) # this.notify_end_users(input_msg) # ffdata 
= {"features": [{"geometry": {"coordinates": [17.198933, 59.577972], "type": "Point"}, @@ -242,7 +245,6 @@ def test_get_options_regional_filtering(self, setup_comm, read_config, gethostna @patch('activefires_pp.fire_notifications.EndUserNotifierRegional._setup_and_start_communication') def test_get_recipients_for_region(self, setup_comm, read_config, gethostname, netrc): """Test getting the recipients for a given region.""" - secrets = MyNetrcMock() netrc.return_value = secrets gethostname.return_value = 'default' diff --git a/activefires_pp/tests/test_fires_filtering.py b/activefires_pp/tests/test_fires_filtering.py index d4db38c..a50b509 100644 --- a/activefires_pp/tests/test_fires_filtering.py +++ b/activefires_pp/tests/test_fires_filtering.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021, 2022 Adam Dybbroe +# Copyright (c) 2021 - 2023 Adam Dybbroe # Author(s): @@ -24,7 +24,9 @@ import pytest from unittest.mock import patch +from unittest import TestCase import pandas as pd +from geojson import FeatureCollection import numpy as np import io from datetime import datetime @@ -33,8 +35,11 @@ from activefires_pp.post_processing import ActiveFiresPostprocessing from activefires_pp.post_processing import COL_NAMES +from activefires_pp.post_processing import geojson_feature_collection_from_detections TEST_ACTIVE_FIRES_FILEPATH = "./AFIMG_j01_d20210414_t1126439_e1128084_b17637_c20210414114130392094_cspp_dev.txt" +TEST_ACTIVE_FIRES_FILEPATH2 = "./AFIMG_npp_d20230616_t1110054_e1111296_b60284_c20230616112418557033_cspp_dev.txt" + TEST_ACTIVE_FIRES_FILE_DATA = """ # Active Fires I-band EDR @@ -72,6 +77,30 @@ 57.42747116, -3.47912717, 353.80722046, 0.375, 0.375, 8, 12.13035393 """ +# Here we have sorted out all detections not passing the filter mask! 
+# So, 4 fire detections are left corresponding to what would end up in the geojson files: +TEST_ACTIVE_FIRES_FILE_DATA2 = """ +# Active Fires I-band EDR +# +# source: AFIMG_npp_d20230616_t1110054_e1111296_b60284_c20230616112418557033_cspp_dev.nc +# version: CSPP Active Fires version: cspp-active-fire-noaa_1.1.0 +# +# column 1: latitude of fire pixel (degrees) +# column 2: longitude of fire pixel (degrees) +# column 3: I04 brightness temperature of fire pixel (K) +# column 4: Along-scan fire pixel resolution (km) +# column 5: Along-track fire pixel resolution (km) +# column 6: detection confidence ([7,8,9]->[lo,med,hi]) +# column 7: fire radiative power (MW) +# +# number of fire pixels: 14 +# + 62.65801239, 17.25905228, 339.66326904, 0.375, 0.375, 8, 2.51202917 + 64.21694183, 17.42074966, 329.65161133, 0.375, 0.375, 8, 3.39806151 + 64.56904602, 16.60095215, 346.52050781, 0.375, 0.375, 8, 20.59289360 + 64.57222748, 16.59840012, 348.72860718, 0.375, 0.375, 8, 20.59289360 +""" + CONFIG_EXAMPLE = {'publish_topic': '/VIIRS/L2/Fires/PP', 'subscribe_topics': 'VIIRS/L2/AFI', 'af_pattern_ibands': @@ -82,6 +111,7 @@ '{region_name:s}.geojson', 'regional_shapefiles_format': 'omr_{region_code:s}_Buffer.{ext:s}', 'output_dir': '/path/where/the/filtered/results/will/be/stored', + 'filepath_detection_id_cache': '/path/to/the/detection_id/cache', 'timezone': 'Europe/Stockholm'} OPEN_FSTREAM = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA) @@ -125,8 +155,8 @@ def test_add_start_and_end_time_to_active_fires_data_utc(readdata): """Test adding start and end times to the active fires data.""" myfilepath = TEST_ACTIVE_FIRES_FILEPATH - fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA) - afdata = pd.read_csv(fstream, index_col=None, header=None, comment='#', names=COL_NAMES) + # fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA) + afdata = pd.read_csv(OPEN_FSTREAM, index_col=None, header=None, comment='#', names=COL_NAMES) readdata.return_value = afdata this = 
ActiveFiresShapefileFiltering(filepath=myfilepath, timezone='GMT') @@ -215,6 +245,12 @@ def test_regional_fires_filtering(setup_comm, get_config, gethostname): fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA) afdata = pd.read_csv(fstream, index_col=None, header=None, comment='#', names=COL_NAMES) + starttime = datetime.fromisoformat('2021-04-14 11:26:43.900') + endtime = datetime.fromisoformat('2021-04-14 11:28:08') + + afdata['starttime'] = np.repeat(starttime, len(afdata)).astype(np.datetime64) + afdata['endtime'] = np.repeat(endtime, len(afdata)).astype(np.datetime64) + # Add metadata to the pandas dataframe: fake_metadata = {'platform': 'j01', 'start_time': datetime(2021, 4, 14, 11, 26, 43, 900000), @@ -230,7 +266,6 @@ def test_regional_fires_filtering(setup_comm, get_config, gethostname): mymsg = "Fake message" with patch('activefires_pp.post_processing.store_geojson') as store_geojson: with patch('activefires_pp.post_processing.ActiveFiresPostprocessing._generate_output_message') as generate_msg: - store_geojson.return_value = "/some/output/path" generate_msg.return_value = "my fake output message" result = afpp.regional_fires_filtering_and_publishing(mymsg, regional_fmask, af_shapeff) @@ -272,22 +307,14 @@ def test_general_national_fires_filtering(get_global_mask, setup_comm, get_confi afdata = af_shapeff.get_af_data(MY_FILE_PATTERN) mymsg = "Fake message" + result = afpp.fires_filtering(mymsg, af_shapeff) - with patch('activefires_pp.post_processing.store_geojson') as store_geojson: - with patch('activefires_pp.post_processing.ActiveFiresPostprocessing.get_output_messages') as get_output_msg: - store_geojson.return_value = "/some/output/path" - get_output_msg.return_value = ["my fake output message"] - outmsg, result = afpp.fires_filtering(mymsg, af_shapeff) - - store_geojson.assert_called_once() - get_output_msg.assert_called_once() assert get_global_mask.call_count == 2 assert isinstance(result, pd.core.frame.DataFrame) assert len(result) == 1 
np.testing.assert_almost_equal(result.iloc[0]['latitude'], 59.52483368) np.testing.assert_almost_equal(result.iloc[0]['longitude'], 17.1681633) - assert outmsg == ["my fake output message"] @pytest.mark.usefixtures("fake_national_borders_shapefile") @@ -327,3 +354,55 @@ def test_checking_national_borders_shapefile_file_nonexisting(setup_comm, get_co expected = "Shape file does not exist! Filename = /my/shape/file/with/country/borders" assert str(exec_info.value) == expected + + +@patch('activefires_pp.post_processing._read_data') +def test_get_feature_collection_from_firedata(readdata): + """Test get the Geojson Feature Collection from fire detection.""" + myfilepath = TEST_ACTIVE_FIRES_FILEPATH2 + + fstream = io.StringIO(TEST_ACTIVE_FIRES_FILE_DATA2) + afdata = pd.read_csv(fstream, index_col=None, header=None, comment='#', names=COL_NAMES) + readdata.return_value = afdata + + this = ActiveFiresShapefileFiltering(filepath=myfilepath, timezone='GMT') + with patch('os.path.exists') as mypatch: + mypatch.return_value = True + afdata = this.get_af_data(filepattern=MY_FILE_PATTERN, localtime=False) + + result = geojson_feature_collection_from_detections(afdata, platform_name='Suomi-NPP') + + # NB! The time of the afdata is here still in UTC! 
+ expected = FeatureCollection([{"geometry": {"coordinates": [17.259052, 62.658012], + "type": "Point"}, + "properties": {"confidence": 8, + "observation_time": "2023-06-16T11:10:47.200000", + "platform_name": "Suomi-NPP", + "power": 2.51202917, "tb": 339.66326904}, + "type": "Feature"}, + {"geometry": {"coordinates": [17.42075, 64.216942], + "type": "Point"}, + "properties": {"confidence": 8, + "observation_time": "2023-06-16T11:10:47.200000", + "platform_name": "Suomi-NPP", + "power": 3.39806151, + "tb": 329.65161133}, + "type": "Feature"}, + {"geometry": {"coordinates": [16.600952, 64.569046], + "type": "Point"}, + "properties": {"confidence": 8, + "observation_time": "2023-06-16T11:10:47.200000", + "platform_name": "Suomi-NPP", + "power": 20.5928936, + "tb": 346.52050781}, + "type": "Feature"}, + {"geometry": {"coordinates": [16.5984, 64.572227], + "type": "Point"}, + "properties": {"confidence": 8, + "observation_time": "2023-06-16T11:10:47.200000", + "platform_name": "Suomi-NPP", + "power": 20.5928936, + "tb": 348.72860718}, + "type": "Feature"}]) + + TestCase().assertDictEqual(result, expected) diff --git a/activefires_pp/tests/test_geojson_utils.py b/activefires_pp/tests/test_geojson_utils.py index bd20512..048f99e 100644 --- a/activefires_pp/tests/test_geojson_utils.py +++ b/activefires_pp/tests/test_geojson_utils.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2022 Adam Dybbroe +# Copyright (c) 2022 - 2023 Adam Dybbroe # Author(s): diff --git a/activefires_pp/tests/test_messaging.py b/activefires_pp/tests/test_messaging.py index aee9bbc..5d1437e 100644 --- a/activefires_pp/tests/test_messaging.py +++ b/activefires_pp/tests/test_messaging.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021, 2022 Adam.Dybbroe +# Copyright (c) 2021, 2022, 2023 Adam.Dybbroe # Author(s): @@ -20,35 +20,139 @@ # You should have received a copy of the GNU General Public License # along with this program. 
If not, see . -"""Unit testing the message handling part of the post-processing -""" +"""Unit testing the message handling part of the post-processing.""" from unittest.mock import patch from datetime import datetime from posttroll.message import Message +from posttroll.testing import patched_publisher +from posttroll.publisher import create_publisher_from_dict_config + from activefires_pp.post_processing import ActiveFiresPostprocessing from activefires_pp.spatiotemporal_alarm_filtering import _create_output_message -TEST_MSG = """pytroll://VIIRS/L2/AFI/edr/2/nrk/test/polar/direct_readout file safusr.t@lxserv2313.smhi.se 2021-04-07T00:41:41.568370 v1.01 application/json {"start_time": "2021-04-07T00:28:17", "end_time": "2021-04-07T00:29:40", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "format": "edr", "type": "netcdf", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17530, "origin": "172.29.4.164:9099", "uri": "ssh://lxserv2313.smhi.se/san1/polar_out/direct_readout/viirs_active_fires/unfiltered/AFIMG_j01_d20210407_t0028179_e0029407_b17531_c20210407004133375592_cspp_dev.nc", "uid": "AFIMG_j01_d20210407_t0028179_e0029407_b17531_c20210407004133375592_cspp_dev.nc"}""" +TEST_MSG = """pytroll://VIIRS/L2/AFI/edr/2/nrk/test/polar/direct_readout file safusr.t@lxserv2313.smhi.se 2021-04-07T00:41:41.568370 v1.01 application/json {"start_time": "2021-04-07T00:28:17", "end_time": "2021-04-07T00:29:40", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "format": "edr", "type": "netcdf", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17530, "origin": "172.29.4.164:9099", "uri": "ssh://lxserv2313.smhi.se/san1/polar_out/direct_readout/viirs_active_fires/unfiltered/AFIMG_j01_d20210407_t0028179_e0029407_b17531_c20210407004133375592_cspp_dev.nc", "uid": "AFIMG_j01_d20210407_t0028179_e0029407_b17531_c20210407004133375592_cspp_dev.nc"}""" # noqa +TEST_MSG_TXT = 
"""pytroll://VIIRS/L2/AFI/edr/2/nrk/test/polar/direct_readout file safusr.t@lxserv2313.smhi.se 2023-07-05T10:27:28.821803 v1.01 application/json {"start_time": "2023-07-05T10:07:50", "end_time": "2023-07-05T10:09:15", "orbit_number": 1, "platform_name": "Suomi-NPP", "sensor": "viirs", "format": "edr", "type": "txt", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 60553, "origin": "172.29.4.164:9099", "uri": "/san1/polar_out/direct_readout/viirs_active_fires/unfiltered/AFIMG_npp_d20230705_t1007509_e1009151_b60553_c20230705102721942345_cspp_dev.txt", "uid": "AFIMG_npp_d20230705_t1007509_e1009151_b60553_c20230705102721942345_cspp_dev.txt"}""" # noqa CONFIG_EXAMPLE = {'publish_topic': '/VIIRS/L2/Fires/PP', 'subscribe_topics': 'VIIRS/L2/AFI', 'af_pattern_ibands': - 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_hour:%H%M%S%f}_b{orbit:s}_c{processing_time:%Y%m%d%H%M%S%f}_cspp_dev.txt', + 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_hour:%H%M%S%f}_b{orbit:s}_c{processing_time:%Y%m%d%H%M%S%f}_cspp_dev.txt', # noqa 'geojson_file_pattern_national': 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S}.geojson', - 'geojson_file_pattern_regional': 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S}_{region_name:s}.geojson', + 'geojson_file_pattern_regional': 'AFIMG_{platform:s}_d{start_time:%Y%m%d_t%H%M%S}_{region_name:s}.geojson', # noqa 'regional_shapefiles_format': 'omr_{region_code:s}_Buffer.{ext:s}', 'output_dir': '/path/where/the/filtered/results/will/be/stored'} +def get_fake_publiser(): + """Return a fake publisher.""" + return create_publisher_from_dict_config(dict(port=1979, nameservers=False)) + + +@patch('os.path.exists') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +def test_check_incoming_message_nc_file_exists(setup_comm, + get_config, gethostname, path_exists): + """Test the check of incoming 
message content and getting the file path from the message. + + Here we test the case when a netCDF file is provided in the message and we + test the behaviour when the file also actually exist on the file system. + """ + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + path_exists.return_value = True + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp.publisher = get_fake_publiser() + afpp.publisher.start() + + input_msg = Message.decode(rawstr=TEST_MSG) + with patched_publisher() as published_messages: + result = afpp.check_incoming_message_and_get_filename(input_msg) + + assert result is None + assert len(published_messages) == 2 + assert 'No fire detections for this granule' in published_messages[0] + assert 'No fire detections for this granule' in published_messages[1] + assert 'VIIRS/L2/Fires/PP/National' in published_messages[0] + assert 'VIIRS/L2/Fires/PP/Regional' in published_messages[1] + + +@patch('os.path.exists') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +def test_check_incoming_message_txt_file_exists(setup_comm, + get_config, gethostname, path_exists): + """Test the check of incoming message content and getting the file path from the message. + + Here we test the case when a txt file is provided in the message and we + test the behaviour when the file also actually exist on the file system. 
+ """ + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + path_exists.return_value = True + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp.publisher = get_fake_publiser() + afpp.publisher.start() + + input_msg = Message.decode(rawstr=TEST_MSG_TXT) + with patched_publisher() as published_messages: + result = afpp.check_incoming_message_and_get_filename(input_msg) + + assert len(published_messages) == 0 + assert result == '/san1/polar_out/direct_readout/viirs_active_fires/unfiltered/AFIMG_npp_d20230705_t1007509_e1009151_b60553_c20230705102721942345_cspp_dev.txt' # noqa + + +@patch('os.path.exists') +@patch('socket.gethostname') +@patch('activefires_pp.post_processing.read_config') +@patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') +def test_check_incoming_message_txt_file_does_not_exist(setup_comm, + get_config, gethostname, path_exists): + """Test the check of incoming message content and getting the file path from the message. + + Here we test the case when a txt file is provided in the message and we + test the behaviour when the file does not exist on the file system. 
+ """ + get_config.return_value = CONFIG_EXAMPLE + gethostname.return_value = "my.host.name" + path_exists.return_value = False + + myconfigfile = "/my/config/file/path" + myborders_file = "/my/shape/file/with/country/borders" + mymask_file = "/my/shape/file/with/polygons/to/filter/out" + + afpp = ActiveFiresPostprocessing(myconfigfile, myborders_file, mymask_file) + afpp.publisher = get_fake_publiser() + afpp.publisher.start() + + input_msg = Message.decode(rawstr=TEST_MSG_TXT) + with patched_publisher() as published_messages: + result = afpp.check_incoming_message_and_get_filename(input_msg) + + assert len(published_messages) == 0 + assert result is None + + @patch('socket.gethostname') @patch('activefires_pp.post_processing.read_config') @patch('activefires_pp.post_processing.ActiveFiresPostprocessing._setup_and_start_communication') def test_prepare_posttroll_message(setup_comm, get_config, gethostname): """Test setup the posttroll message.""" - get_config.return_value = CONFIG_EXAMPLE gethostname.return_value = "my.host.name" @@ -93,12 +197,12 @@ def test_prepare_posttroll_message(setup_comm, get_config, gethostname): def test_create_output_message(tmp_path): """Test create output message from geojson payload.""" - input_msg = Message.decode(rawstr=TEST_MSG) filename = tmp_path / 'test_geojson_alarm_file.geojson' output_topic = '/VIIRS/L2/Fires/PP/SOSAlarm' geojson_alarm = {"features": {"geometry": {"coordinates": [16.249069, 57.156235], "type": "Point"}, - "properties": {"confidence": 8, "observation_time": "2021-06-19T02:58:45.700000+02:00", + "properties": {"confidence": 8, + "observation_time": "2021-06-19T02:58:45.700000+02:00", "platform_name": "NOAA-20", "power": 2.23312426, "related_detection": False, diff --git a/activefires_pp/tests/test_utils.py b/activefires_pp/tests/test_utils.py index 58c3179..f572b31 100644 --- a/activefires_pp/tests/test_utils.py +++ b/activefires_pp/tests/test_utils.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 
-*- -# Copyright (c) 2021, 2022 Adam.Dybbroe +# Copyright (c) 2021, 2022, 2023 Adam.Dybbroe # Author(s): @@ -20,8 +20,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Unit testing the utility functions. -""" +"""Unit testing the utility functions.""" import pytest from freezegun import freeze_time @@ -32,12 +31,11 @@ from activefires_pp.utils import datetime_utc2local from activefires_pp.utils import json_serial -NATIONAL_TEST_MESSAGE = """pytroll://VIIRS/L2/Fires/PP/National file safusr.u@lxserv1043.smhi.se 2021-04-19T11:16:49.519087 v1.01 application/json {"start_time": "2021-04-16T12:29:53", "end_time": "2021-04-16T12:31:18", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17666, "uri": "ssh://lxserv1043.smhi.se//san1/polar_out/direct_readout/viirs_active_fires/filtered/AFIMG_j01_d20210416_t122953.geojson", "uid": "AFIMG_j01_d20210416_t122953.geojson", "type": "GEOJSON-filtered", "format": "geojson", "product": "afimg"}""" +NATIONAL_TEST_MESSAGE = """pytroll://VIIRS/L2/Fires/PP/National file safusr.u@lxserv1043.smhi.se 2021-04-19T11:16:49.519087 v1.01 application/json {"start_time": "2021-04-16T12:29:53", "end_time": "2021-04-16T12:31:18", "orbit_number": 1, "platform_name": "NOAA-20", "sensor": "viirs", "data_processing_level": "2", "variant": "DR", "orig_orbit_number": 17666, "uri": "ssh://lxserv1043.smhi.se//san1/polar_out/direct_readout/viirs_active_fires/filtered/AFIMG_j01_d20210416_t122953.geojson", "uid": "AFIMG_j01_d20210416_t122953.geojson", "type": "GEOJSON-filtered", "format": "geojson", "product": "afimg"}""" # noqa def test_json_serial(): """Test the json_serial function.""" - dtime_obj = datetime(2021, 4, 7, 11, 58, 53, 200000) res = json_serial(dtime_obj) @@ -64,7 +62,6 @@ def test_get_filename_from_posttroll_message(): @freeze_time('2022-03-26 18:12:05') def test_utc2localtime_conversion(): 
"""Test converting utc time to local time.""" - atime1 = datetime.utcnow() dtobj = datetime_utc2local(atime1, 'Europe/Stockholm') assert dtobj.strftime('%Y%m%d-%H%M') == '20220326-1912' diff --git a/examples/fires_pp.yaml b/examples/fires_pp.yaml index f5dec6a..a44f72e 100644 --- a/examples/fires_pp.yaml +++ b/examples/fires_pp.yaml @@ -11,5 +11,7 @@ regional_shapefiles_format: omr_{region_code:s}_Buffer.{ext:s} output_dir: /path/where/the/filtered/results/will/be/stored +filepath_detection_id_cache = /path/to/the/detection_id/cache + timezone: Europe/Stockholm # pytz.all_timezones \ No newline at end of file