diff --git a/pyproject.toml b/pyproject.toml index 32e69ca..3076a64 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,6 @@ ignore = [ 'PERF203', # Allow try-except in loop 'PLR0912', # Allow functions with many branches 'PT', # Not using pytest - 'PTH', # Still using os.path 'RET', # Allow elif/else after return 'S101', # Assert is used to prevent incorrect 'S105', # Some hardcoded test passwords @@ -57,3 +56,8 @@ source = [ 'writer', 'wsgi', ] + +[tool.coverage.report] +show_missing = true +skip_empty = true +skip_covered = true diff --git a/tests/test_writer_acceptance.py b/tests/test_writer_acceptance.py index b260966..a168320 100644 --- a/tests/test_writer_acceptance.py +++ b/tests/test_writer_acceptance.py @@ -66,7 +66,7 @@ def tearDown(self): shutil.rmtree(DATASTORE_PATH / '2017') def test_event_acceptance(self): - self.writer_app.process_data(self.pickle_filename['CIC']) + self.writer_app.process_data(self.pickle_filename['CIC'], DATASTORE_PATH) data = self.read_table('events') self.assertEqual(data['timestamp'], 1488093964) @@ -86,7 +86,7 @@ def test_event_acceptance(self): self.assertEqual(tr1, base64.decodebytes(tr1_b64)) def test_singles_acceptance(self): - self.writer_app.process_data(self.pickle_filename['SIN']) + self.writer_app.process_data(self.pickle_filename['SIN'], DATASTORE_PATH) data = self.read_table('singles') self.assertEqual(data['timestamp'], 1488094031) @@ -99,7 +99,7 @@ def test_singles_acceptance(self): self.assertEqual(len(blobs), 0) def test_weather_acceptance(self): - self.writer_app.process_data(self.pickle_filename['WTR']) + self.writer_app.process_data(self.pickle_filename['WTR'], DATASTORE_PATH) data = self.read_table('weather') self.assertEqual(data['timestamp'], 1488094084) @@ -111,7 +111,7 @@ def test_weather_acceptance(self): self.assertEqual(len(blobs), 0) def test_config_acceptance(self): - self.writer_app.process_data(self.pickle_filename['CFG']) + self.writer_app.process_data(self.pickle_filename['CFG'], DATASTORE_PATH) data = self.read_table('config') self.assertEqual(data['timestamp'], 1488125225) self.assertEqual(data['mas_ch1_thres_high'], 320) @@ -139,7 +139,3 @@ class TestWriterAcceptancePy3Pickles(TestWriterAcceptancePy2Pickles): """Acceptance tests for python 3 pickles""" pickle_version = 'py3' - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_wsgi_app.py b/tests/test_wsgi_app.py index 23380c3..1a9cb89 100644 --- a/tests/test_wsgi_app.py +++ b/tests/test_wsgi_app.py @@ -148,12 +148,8 @@ def assert_num_files_in_datastore(self, incoming=0, suspicious=0): self.assertEqual(len(self.files_in_folder(DATASTORE_PATH / 'suspicious')), suspicious) def assert_num_events_written(self, number_of_events): - fn = self.files_in_folder(DATASTORE_PATH / 'incoming')[0] - with open(fn, 'rb') as f: - data = pickle.load(f) + file_path = self.files_in_folder(DATASTORE_PATH / 'incoming')[0] + with file_path.open('rb') as file_handle: + data = pickle.load(file_handle) written_event_list = data['event_list'] self.assertEqual(len(written_event_list), number_of_events) - - -if __name__ == '__main__': - unittest.main() diff --git a/writer/storage.py b/writer/storage.py index f2782fd..7e2061d 100644 --- a/writer/storage.py +++ b/writer/storage.py @@ -1,7 +1,5 @@ """Storage docstrings""" -import os - import tables @@ -276,12 +274,11 @@ def open_or_create_file(data_dir, date): :param date: the event date """ - directory = os.path.join(data_dir, '%d/%d' % (date.year, date.month)) - file = os.path.join(directory, '%d_%d_%d.h5' % 
(date.year, date.month, date.day))
+    directory = data_dir / f'{date.year}/{date.month}'
+    file = directory / f'{date.year}_{date.month}_{date.day}.h5'
 
-    if not os.path.exists(directory):
-        # create dir and parent dirs with mode rwxr-xr-x
-        os.makedirs(directory, 0o755)
+    # Ensure dir and parent directories exist with mode rwxr-xr-x
+    directory.mkdir(mode=0o755, parents=True, exist_ok=True)
 
     return tables.open_file(file, 'a')
diff --git a/writer/writer_app.py b/writer/writer_app.py
index 57841c7..e4684ff 100644
--- a/writer/writer_app.py
+++ b/writer/writer_app.py
@@ -8,11 +8,11 @@ import configparser
 import logging
 import logging.handlers
-import os
 import pickle
-import shutil
 import time
 
+from pathlib import Path
+
 from writer.store_events import store_event_list
 
 LEVELS = {
@@ -53,31 +53,34 @@ def writer(configfile):
     level = LEVELS.get(config.get('General', 'loglevel'), logging.NOTSET)
     logger.setLevel(level=level)
 
-    queue = os.path.join(config.get('General', 'data_dir'), 'incoming')
-    partial_queue = os.path.join(config.get('General', 'data_dir'), 'partial')
+    data_dir = Path(config.get('General', 'data_dir'))
+    queue = data_dir / 'incoming'
+    partial_queue = data_dir / 'partial'
+
+    sleep_duration = config.getint('Writer', 'sleep')
 
     # writer process
     try:
         while True:
-            entries = os.listdir(queue)
+            entries = list(queue.iterdir())
             if not entries:
-                time.sleep(config.getint('Writer', 'sleep'))
+                time.sleep(sleep_duration)
 
             for entry in entries:
-                path = os.path.join(queue, entry)
-                shutil.move(path, partial_queue)
+                partial_path = partial_queue / entry.name
+                entry.rename(partial_path)
+
+                process_data(partial_path, data_dir)
+                partial_path.unlink()
 
-                path = os.path.join(partial_queue, entry)
-                process_data(path)
-                os.remove(path)
     except Exception:
         logger.exception('Exception occured, quitting.')
 
 
-def process_data(file):
+def process_data(file, data_dir):
     """Read data from a pickled object and store store in raw datastore"""
-    with open(file, 'rb') as handle:
+    with file.open('rb') as handle:
         try:
             data = pickle.load(handle)
         except UnicodeDecodeError:
@@ -85,7 +88,7 @@
             data = decode_object(pickle.load(handle, encoding='bytes'))
 
     logger.debug(f"Processing data for station {data['station_id']}")
-    store_event_list(config.get('General', 'data_dir'), data['station_id'], data['cluster'], data['event_list'])
+    store_event_list(data_dir, data['station_id'], data['cluster'], data['event_list'])
 
 
 def decode_object(o):
diff --git a/wsgi/wsgi_app.py b/wsgi/wsgi_app.py
index 6bda202..199cb08 100644
--- a/wsgi/wsgi_app.py
+++ b/wsgi/wsgi_app.py
@@ -9,6 +9,8 @@ import tempfile
 import urllib.parse
 
+from pathlib import Path
+
 from . import rcodes
 
 LEVELS = {
@@ -133,7 +135,8 @@ def do_init(configfile):
         station_list
     except NameError:
         station_list = {}
-        with open(config.get('General', 'station_list')) as file:
+        station_list_path = Path(config.get('General', 'station_list'))
+        with station_list_path.open() as file:
             reader = csv.reader(file)
             for station in reader:
                 if station:
@@ -147,12 +150,14 @@ def store_data(station_id, cluster, event_list):
 
     logger.debug(f'Storing data for station {station_id}')
 
-    directory = os.path.join(config.get('General', 'data_dir'), 'incoming')
-    tmp_dir = os.path.join(config.get('General', 'data_dir'), 'tmp')
+    data_dir = Path(config.get('General', 'data_dir'))
+
+    directory = data_dir / 'incoming'
+    tmp_dir = data_dir / 'tmp'
 
     if is_data_suspicious(event_list):
         logger.debug('Event list marked as suspicious.')
-        directory = os.path.join(config.get('General', 'data_dir'), 'suspicious')
+        directory = data_dir / 'suspicious'
 
     file = tempfile.NamedTemporaryFile(dir=tmp_dir, delete=False)
     logger.debug(f'Filename: {file.name}')
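
Reviewer note: the sketch below is not part of the patch. It collects the pathlib idioms this change switches to, for anyone comparing them against the old os.path/shutil calls. The datastore root and directory layout are hypothetical stand-ins for the values read from the [General] section of the config; only the idioms mirror the diff above.

# A minimal, self-contained sketch of the pathlib equivalents used in this
# patch; '/tmp/datastore-example' is a hypothetical stand-in for data_dir.
import time
from datetime import date
from pathlib import Path

data_dir = Path('/tmp/datastore-example')
queue = data_dir / 'incoming'
partial_queue = data_dir / 'partial'
queue.mkdir(mode=0o755, parents=True, exist_ok=True)
partial_queue.mkdir(mode=0o755, parents=True, exist_ok=True)

# os.makedirs(directory, 0o755) guarded by os.path.exists() collapses into a
# single mkdir() call, as in storage.open_or_create_file().
today = date.today()
directory = data_dir / f'{today.year}/{today.month}'
directory.mkdir(mode=0o755, parents=True, exist_ok=True)

# os.listdir()/shutil.move()/os.remove() become iterdir()/rename()/unlink(),
# as in writer_app.writer(). Note the list(): iterdir() returns a lazy
# iterator, so the 'if not entries' emptiness check needs a materialised list.
entries = list(queue.iterdir())
if not entries:
    time.sleep(1)
for entry in entries:
    partial_path = partial_queue / entry.name
    entry.rename(partial_path)
    # process_data(partial_path, data_dir) would run here
    partial_path.unlink()

One design remark: Path.rename() only works within a single filesystem, which holds here because incoming/ and partial/ are both subdirectories of data_dir; shutil.move() would additionally cover cross-device moves.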