Skip to content

Commit

Permalink
Replace os.path by pathlib.Path (#49)
Browse files Browse the repository at this point in the history
  • Loading branch information
153957 committed Jul 4, 2024
2 parents f9a8df2 + 484166e commit 27bc159
Show file tree
Hide file tree
Showing 6 changed files with 42 additions and 41 deletions.
6 changes: 5 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ ignore = [
'PERF203', # Allow try-except in loop
'PLR0912', # Allow functions with many branches
'PT', # Not using pytest
'PTH', # Still using os.path
'RET', # Allow elif/else after return
'S101', # Assert is used to prevent incorrect
'S105', # Some hardcoded test passwords
Expand Down Expand Up @@ -57,3 +56,8 @@ source = [
'writer',
'wsgi',
]

[tool.coverage.report]
show_missing = true
skip_empty = true
skip_covered = true
12 changes: 4 additions & 8 deletions tests/test_writer_acceptance.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def tearDown(self):
shutil.rmtree(DATASTORE_PATH / '2017')

def test_event_acceptance(self):
self.writer_app.process_data(self.pickle_filename['CIC'])
self.writer_app.process_data(self.pickle_filename['CIC'], DATASTORE_PATH)

data = self.read_table('events')
self.assertEqual(data['timestamp'], 1488093964)
Expand All @@ -86,7 +86,7 @@ def test_event_acceptance(self):
self.assertEqual(tr1, base64.decodebytes(tr1_b64))

def test_singles_acceptance(self):
self.writer_app.process_data(self.pickle_filename['SIN'])
self.writer_app.process_data(self.pickle_filename['SIN'], DATASTORE_PATH)

data = self.read_table('singles')
self.assertEqual(data['timestamp'], 1488094031)
Expand All @@ -99,7 +99,7 @@ def test_singles_acceptance(self):
self.assertEqual(len(blobs), 0)

def test_weather_acceptance(self):
self.writer_app.process_data(self.pickle_filename['WTR'])
self.writer_app.process_data(self.pickle_filename['WTR'], DATASTORE_PATH)

data = self.read_table('weather')
self.assertEqual(data['timestamp'], 1488094084)
Expand All @@ -111,7 +111,7 @@ def test_weather_acceptance(self):
self.assertEqual(len(blobs), 0)

def test_config_acceptance(self):
self.writer_app.process_data(self.pickle_filename['CFG'])
self.writer_app.process_data(self.pickle_filename['CFG'], DATASTORE_PATH)
data = self.read_table('config')
self.assertEqual(data['timestamp'], 1488125225)
self.assertEqual(data['mas_ch1_thres_high'], 320)
Expand Down Expand Up @@ -139,7 +139,3 @@ class TestWriterAcceptancePy3Pickles(TestWriterAcceptancePy2Pickles):
"""Acceptance tests for python 3 pickles"""

pickle_version = 'py3'


if __name__ == '__main__':
unittest.main()
10 changes: 3 additions & 7 deletions tests/test_wsgi_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,12 +148,8 @@ def assert_num_files_in_datastore(self, incoming=0, suspicious=0):
self.assertEqual(len(self.files_in_folder(DATASTORE_PATH / 'suspicious')), suspicious)

def assert_num_events_written(self, number_of_events):
fn = self.files_in_folder(DATASTORE_PATH / 'incoming')[0]
with open(fn, 'rb') as f:
data = pickle.load(f)
file_path = self.files_in_folder(DATASTORE_PATH / 'incoming')[0]
with file_path.open('rb') as file_handle:
data = pickle.load(file_handle)
written_event_list = data['event_list']
self.assertEqual(len(written_event_list), number_of_events)


if __name__ == '__main__':
unittest.main()
11 changes: 4 additions & 7 deletions writer/storage.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
"""Storage docstrings"""

import os

import tables


Expand Down Expand Up @@ -276,12 +274,11 @@ def open_or_create_file(data_dir, date):
:param date: the event date
"""
directory = os.path.join(data_dir, '%d/%d' % (date.year, date.month))
file = os.path.join(directory, '%d_%d_%d.h5' % (date.year, date.month, date.day))
directory = data_dir / f'{date.year}/{date.month}'
file = directory / f'{date.year}_{date.month}_{date.day}.h5'

if not os.path.exists(directory):
# create dir and parent dirs with mode rwxr-xr-x
os.makedirs(directory, 0o755)
# Ensure dir and parent directories exist with mode rwxr-xr-x
directory.mkdir(mode=0o755, parents=True, exist_ok=True)

return tables.open_file(file, 'a')

Expand Down
31 changes: 17 additions & 14 deletions writer/writer_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,11 @@
import configparser
import logging
import logging.handlers
import os
import pickle
import shutil
import time

from pathlib import Path

from writer.store_events import store_event_list

LEVELS = {
Expand Down Expand Up @@ -53,39 +53,42 @@ def writer(configfile):
level = LEVELS.get(config.get('General', 'loglevel'), logging.NOTSET)
logger.setLevel(level=level)

queue = os.path.join(config.get('General', 'data_dir'), 'incoming')
partial_queue = os.path.join(config.get('General', 'data_dir'), 'partial')
data_dir = Path(config.get('General', 'data_dir'))
queue = data_dir / 'incoming'
partial_queue = data_dir / 'partial'

sleep_duration = config.getint('Writer', 'sleep')

# writer process
try:
while True:
entries = os.listdir(queue)
entries = queue.iterdir()

if not entries:
time.sleep(config.getint('Writer', 'sleep'))
time.sleep(sleep_duration)

for entry in entries:
path = os.path.join(queue, entry)
shutil.move(path, partial_queue)
partial_path = partial_queue / entry.name
entry.rename(partial_path)

process_data(partial_path, data_dir)
partial_path.unlink()

path = os.path.join(partial_queue, entry)
process_data(path)
os.remove(path)
except Exception:
        logger.exception('Exception occurred, quitting.')


def process_data(file):
def process_data(file, data_dir):
"""Read data from a pickled object and store store in raw datastore"""
with open(file, 'rb') as handle:
with file.open('rb') as handle:
try:
data = pickle.load(handle)
except UnicodeDecodeError:
logger.debug('Data seems to be pickled using python 2. Decoding.')
data = decode_object(pickle.load(handle, encoding='bytes'))

logger.debug(f"Processing data for station {data['station_id']}")
store_event_list(config.get('General', 'data_dir'), data['station_id'], data['cluster'], data['event_list'])
store_event_list(data_dir, data['station_id'], data['cluster'], data['event_list'])


def decode_object(o):
Expand Down
13 changes: 9 additions & 4 deletions wsgi/wsgi_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@
import tempfile
import urllib.parse

from pathlib import Path

from . import rcodes

LEVELS = {
Expand Down Expand Up @@ -133,7 +135,8 @@ def do_init(configfile):
station_list
except NameError:
station_list = {}
with open(config.get('General', 'station_list')) as file:
station_list_path = Path(config.get('General', 'station_list'))
with station_list_path.open() as file:
reader = csv.reader(file)
for station in reader:
if station:
Expand All @@ -147,12 +150,14 @@ def store_data(station_id, cluster, event_list):

logger.debug(f'Storing data for station {station_id}')

directory = os.path.join(config.get('General', 'data_dir'), 'incoming')
tmp_dir = os.path.join(config.get('General', 'data_dir'), 'tmp')
data_dir = Path(config.get('General', 'data_dir'))

directory = data_dir / 'incoming'
tmp_dir = data_dir / 'tmp'

if is_data_suspicious(event_list):
logger.debug('Event list marked as suspicious.')
directory = os.path.join(config.get('General', 'data_dir'), 'suspicious')
directory = data_dir / 'suspicious'

file = tempfile.NamedTemporaryFile(dir=tmp_dir, delete=False)
logger.debug(f'Filename: {file.name}')
Expand Down

0 comments on commit 27bc159

Please sign in to comment.