Skip to content

Commit

Permalink
Minor refactoring
Browse files Browse the repository at this point in the history
  • Loading branch information
geojunky committed Aug 31, 2023
1 parent 43103fe commit bed16ab
Show file tree
Hide file tree
Showing 8 changed files with 48 additions and 112 deletions.
17 changes: 1 addition & 16 deletions legacy/misc/FederatedASDFDataSetMemVariant.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,22 +29,7 @@
from collections import defaultdict
from rtree import index
from seismic.ASDFdatabase.utils import MIN_DATE, MAX_DATE

logging.basicConfig()

def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
"""
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file, mode='w')
handler.setFormatter(formatter)

logger = logging.getLogger(name)
logger.setLevel(level)
logger.addHandler(handler)
return logger
# end func
from seismic.misc import setup_logger

def tree():
def the_tree():
Expand Down
9 changes: 7 additions & 2 deletions seismic/ASDFdatabase/FederatedASDFDataSet.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@
from scipy.spatial import cKDTree

from seismic.ASDFdatabase._FederatedASDFDataSetImpl import _FederatedASDFDataSetImpl
from seismic.misc import rtp2xyz
from seismic.misc import rtp2xyz, setup_logger
from obspy.core import UTCDateTime

class FederatedASDFDataSet():
def __init__(self, asdf_source, logger=None,
Expand Down Expand Up @@ -272,6 +273,10 @@ def find_gaps(self, network=None, station=None, location=None,
if len(sys.argv) < 2:
print("******** USAGE: python3 %s %s **********"% (sys.argv[0], "asdf_file_list_txt"))
sys.exit(1)
# end if

asdf_file_list = sys.argv[1]
ds = FederatedASDFDataSet(asdf_file_list)
ts = UTCDateTime().strftime("%Y-%m-%d.T%H.%M.%S")
ofn = 'FederatedASDFDataSet.Indexer.{}.log'.format(ts)
logger = setup_logger('', ofn)
ds = FederatedASDFDataSet(asdf_file_list, logger=logger)
44 changes: 19 additions & 25 deletions seismic/ASDFdatabase/_FederatedASDFDataSetImpl.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,28 +30,12 @@
import hashlib
from functools import partial
from seismic.ASDFdatabase.utils import MIN_DATE, MAX_DATE
from seismic.misc import split_list
from seismic.misc import split_list, setup_logger
import pickle as cPickle
import pandas as pd
from rtree import index
import traceback

logging.basicConfig()

def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
"""
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file, mode='w')
handler.setFormatter(formatter)

logger = logging.getLogger(name)
logger.setLevel(level)
logger.addHandler(handler)
return logger
# end func

def split_list_by_timespan(l, n):
lmin = np.min(l[:, 1])
lmax = np.max(l[:, 1])
Expand Down Expand Up @@ -304,20 +288,30 @@ def day_split(trc):

def create_database(self):
def decode_tag(tag, type='raw_recording'):
"""
Tags are expected in the form: {NET}.{STA}.{LOC}.{CHA}__{ST}__{ET}__{TAG}, where
ST and ET are expected as YYYY-MM-DDThh:mm:ss.s.
@param tag: tag
@param type: str
@return: network, station, location, channel, starttime, endtime
"""
if (type not in tag): return None
try:
tokens = tag.split('.')
nc, sc, lc = tokens[0], tokens[1], tokens[2]
nslc, st, et, _ = tag.split('__')
nc, sc, lc, cc = nslc.split('.')

tokens = tokens[3].split('__')
cc = tokens[0]
starttime = UTCDateTime(tokens[1]).timestamp
endtime = UTCDateTime(tokens[2]).timestamp
starttime = UTCDateTime(st).timestamp
endtime = UTCDateTime(et).timestamp

return nc, sc, lc, cc, starttime, endtime
if((endtime - starttime) == 0):
return None
else:
return nc, sc, lc, cc, starttime, endtime
# end if
except Exception:
if self.logger:
self.logger.error("Failed to decode tag {}".format(tag))
self.logger.warning("Failed to decode tag {}".format(tag))
return None
# end try
# end func
Expand Down
21 changes: 1 addition & 20 deletions seismic/ASDFdatabase/plot_data_quality.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,26 +46,7 @@
from tqdm import tqdm

from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet
from seismic.misc import split_list

logging.basicConfig()

def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
"""
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file, mode='w')
handler.setFormatter(formatter)

logger = logging.getLogger(name + log_file)
logger.setLevel(level)
logger.addHandler(handler)
return logger


# end func

from seismic.misc import split_list, setup_logger

def process_data(rank, fds, stations, start_time, end_time):
"""
Expand Down
17 changes: 17 additions & 0 deletions seismic/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,23 @@
import subprocess
import os, glob, fnmatch, sys
import numpy as np
import logging
logging.basicConfig()

def setup_logger(name, log_file, level=logging.INFO, propagate=False):
    """
    Function to setup a logger; adapted from stackoverflow

    :param name: logger-name prefix; combined with log_file to key the logger,
                 so each output file gets its own logger instance
    :param log_file: path of the log file to write (opened with mode 'w',
                     truncating any previous contents)
    :param level: logging level threshold (default logging.INFO)
    :param propagate: whether records also propagate to ancestor loggers
                      (default False, to avoid duplicate console output from
                      the root logger configured by logging.basicConfig())
    :return: configured logging.Logger instance
    """
    logger = logging.getLogger(name + log_file)
    logger.setLevel(level)

    # Attach the file handler only once: logging.getLogger returns the same
    # object for the same key, so a second setup_logger call with identical
    # name/log_file would otherwise add a duplicate handler and every record
    # would be written twice.
    if not logger.handlers:
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        handler = logging.FileHandler(log_file, mode='w')
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    # end if

    logger.propagate = propagate
    return logger
# end func

def get_git_revision_hash() -> str:
"""
Expand Down
15 changes: 1 addition & 14 deletions seismic/pick_harvester/createEnsembleXML.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,20 +46,7 @@
from seismic.pick_harvester.utils import recursive_glob, split_list
import logging
from tqdm import tqdm

def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
"""
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file, mode='w')
handler.setFormatter(formatter)

logger = logging.getLogger(name+log_file)
logger.setLevel(level)
logger.addHandler(handler)
return logger
# end func
from seismic.misc import setup_logger

class Origin:
__slots__ = ['utctime', 'lat', 'lon', 'depthkm', 'magnitude_list', 'arrival_list']
Expand Down
18 changes: 1 addition & 17 deletions seismic/pick_harvester/local/pick_eqt.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@

from mpi4py import MPI
import os
import logging

from ordered_set import OrderedSet as set
import numpy as np
Expand All @@ -31,6 +30,7 @@
from seismic.xcorqc.utils import get_stream
from seismic.xcorqc.xcorqc import taper
from seismic.misc_p import ProgressTracker
from seismic.misc import setup_logger
import matplotlib.pyplot as plt

from keras.models import load_model
Expand All @@ -39,24 +39,8 @@
from EQTransformer.core.mseed_predictor import _picker
from collections import defaultdict

logging.basicConfig()
DAY_SECONDS = 24 * 3600

def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
"""
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file, mode='w')
handler.setFormatter(formatter)

logger = logging.getLogger(name+log_file)
logger.setLevel(level)
logger.addHandler(handler)
logger.propagate = False
return logger
# end func

def getWorkLoad(fds:FederatedASDFDataSet, netsta_list:str,
start_time:UTCDateTime, end_time:UTCDateTime):
"""
Expand Down
19 changes: 1 addition & 18 deletions seismic/xcorqc/xcorqc.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,24 +38,7 @@
from netCDF4 import Dataset
from functools import reduce
from seismic.xcorqc.utils import SpooledMatrix
logging.basicConfig()


def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
"""
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
handler = logging.FileHandler(log_file, mode='w')
handler.setFormatter(formatter)

logger = logging.getLogger(name+log_file)
logger.setLevel(level)
logger.addHandler(handler)
logger.propagate = False
return logger
# end func

from seismic.misc import setup_logger

def zeropad(tr, padlen):
assert (tr.shape[0] < padlen)
Expand Down

0 comments on commit bed16ab

Please sign in to comment.