Miscellaneous Changes
* Added a get_location_codes() function to FederatedASDFDataSet to fetch the
  unique location codes recorded for a given network and station
* The correlator module now handles location codes and their preferences
  more gracefully: when no preferred location code is configured, the first
  available code is used instead of a blank one
* cwb2asdf now accepts a --file-pattern option for selecting input data files
geojunky committed Aug 9, 2023
1 parent 8652cb3 commit 43103fe
Showing 5 changed files with 64 additions and 6 deletions.
14 changes: 14 additions & 0 deletions seismic/ASDFdatabase/FederatedASDFDataSet.py
@@ -191,6 +191,20 @@ def get_waveforms(self, network, station, location, channel, starttime,

    # end func

+    def get_location_codes(self, network, station, starttime=None, endtime=None):
+        """
+        :param network: network code
+        :param station: station code
+        :param starttime: start time string in UTCDateTime format; can also be an instance of obspy.UTCDateTime
+        :param endtime: end time string in UTCDateTime format; can also be an instance of obspy.UTCDateTime
+        :return: a list containing unique location codes within the timeframe specified
+        """
+
+        return self.fds.get_location_codes(network, station, starttime=starttime, endtime=endtime)
+
+    # end func
+
    def stations_iterator(self, network_list=[], station_list=[]):
        """
        This function provides an iterator over the entire data volume contained in all the ASDF files listed in the
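
A minimal usage sketch of the new method (the file-list path and the AU.ARMA
station below are hypothetical; any network/station present in the data volume
works):

    from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet

    fds = FederatedASDFDataSet('asdf_files.txt')  # hypothetical ASDF file list

    # all unique location codes recorded for AU.ARMA over the full data extent
    print(fds.get_location_codes('AU', 'ARMA'))

    # restricted to a time window; strings or obspy.UTCDateTime instances both work
    print(fds.get_location_codes('AU', 'ARMA',
                                 starttime='2010-01-01T00:00:00',
                                 endtime='2011-01-01T00:00:00'))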
22 changes: 22 additions & 0 deletions seismic/ASDFdatabase/_FederatedASDFDataSetImpl.py
@@ -589,6 +589,28 @@ def get_waveforms(self, network, station, location, channel, starttime,
        return s
    # end func

+    def get_location_codes(self, network, station, starttime=None, endtime=None):
+        st, et = self.get_global_time_range(network, station)
+
+        if(starttime):
+            starttime = UTCDateTime(starttime)
+            if(starttime > st): st = starttime
+        # end if
+
+        if(endtime):
+            endtime = UTCDateTime(endtime)
+            if(endtime < et): et = endtime
+        # end if
+
+        rows = self.get_stations(st, et, network=network, station=station)
+        uniqueLocCodes = set()
+        for row in rows:
+            uniqueLocCodes.add(row[2])
+        # end for
+
+        return sorted(list(uniqueLocCodes))
+    # end func
+
    def stations_iterator(self, network_list=[], station_list=[]):
        workload = None
        if(self.rank==0):
8 changes: 6 additions & 2 deletions seismic/ASDFdatabase/cwb2asdf/cwb2asdf.py
@@ -51,14 +51,17 @@ def make_ASDF_tag(tr, tag):
@click.argument('inventory', required=True,
                type=click.Path(exists=True))
@click.argument('output-file-name', required=True)
+@click.option('--file-pattern', type=str, default='*.mseed',
+              help="File pattern to be used while looking for data files")
@click.option('--channels-to-extract', type=str, default=None, help="Channels to extract, within quotes and space- "
                                                                    "separated.")
@click.option('--min-length-sec', type=int, default=None, help="Minimum length in seconds")
@click.option('--merge-threshold', type=int, default=None, help="Merge traces if the number of traces fetched for an "
                                                                "interval exceeds this threshold")
@click.option('--ntraces-per-file', type=int, default=3600, help="Maximum number of traces per file; if exceeded, the "
                                                                 "file is ignored.")
-def process(input_folder, inventory, output_file_name, channels_to_extract, min_length_sec, merge_threshold,
+def process(input_folder, inventory, output_file_name, file_pattern,
+            channels_to_extract, min_length_sec, merge_threshold,
            ntraces_per_file):
    """
    INPUT_FOLDER: Path to input folder containing miniseed files \n
@@ -109,11 +112,12 @@ def _write(ds, ostream, inventory_dict, netsta_set):

    # generate a list of files
    paths = [i for i in os.listdir(input_folder) if os.path.isfile(os.path.join(input_folder, i))]
-    expr = re.compile(fnmatch.translate('*.mseed'), re.IGNORECASE)
+    expr = re.compile(fnmatch.translate(file_pattern), re.IGNORECASE)
    files = [os.path.join(input_folder, j) for j in paths if re.match(expr, j)]

    files = np.array(files)
    random.Random(nproc).shuffle(files)
+    #print(files); exit(0)
    #files = files[370:380]

    ustations = set()
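
A minimal sketch of exercising the new --file-pattern option through click's
test runner (the import path mirrors the file's location in the repository;
the input paths are hypothetical and must exist for click's Path check):

    from click.testing import CliRunner
    from seismic.ASDFdatabase.cwb2asdf.cwb2asdf import process

    runner = CliRunner()
    # convert only files matching '*.ms' instead of the default '*.mseed'
    result = runner.invoke(process, ['/data/mseed', 'inventory.xml', 'output.h5',
                                     '--file-pattern', '*.ms'])
    print(result.output)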
9 changes: 6 additions & 3 deletions seismic/xcorqc/xcorqc.py
@@ -638,14 +638,17 @@ def IntervalStackXCorr(refds, tempds,
                         'together is redundant')
    # end if

-    # setup logger
+    # get preferred location codes, or use the first available
    ref_loc = location_preferences_dict[ref_net_sta]
    temp_loc = location_preferences_dict[temp_net_sta]
-    if(ref_loc is None): ref_loc = ''
-    if(temp_loc is None): temp_loc = ''
+    if(ref_loc is None): ref_loc = refds.get_location_codes(*ref_net_sta.split('.'))[0]
+    if(temp_loc is None): temp_loc = tempds.get_location_codes(*temp_net_sta.split('.'))[0]

    stationPair = '%s.%s.%s.%s.%s.%s' % (ref_net_sta, ref_loc, ref_cha, temp_net_sta, temp_loc, temp_cha)
    fn = os.path.join(outputPath, '%s.log' % (stationPair if not tracking_tag else
                                              '.'.join([stationPair, tracking_tag])))

+    # setup logger
    logger = setup_logger(stationPair, fn)

    #########################################################################
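
A self-contained sketch of the fallback behaviour above (the file-list path,
the AU.ARMA station and the preferences dict are hypothetical):

    from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet

    refds = FederatedASDFDataSet('asdf_files.txt')
    location_preferences_dict = {'AU.ARMA': None}  # no preference configured

    ref_net_sta = 'AU.ARMA'
    ref_loc = location_preferences_dict[ref_net_sta]
    if ref_loc is None:
        # previously this fell back to ''; now the first location code
        # reported by the dataset for the station is used instead
        ref_loc = refds.get_location_codes(*ref_net_sta.split('.'))[0]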
17 changes: 16 additions & 1 deletion tests/test_seismic/ASDFdatabase/test_federatedasdfdataset.py
@@ -84,6 +84,21 @@ def test_get_stations():
    assert len(station_set) == 8
# end func

+def test_get_location_codes():
+    fds = FederatedASDFDataSet(asdf_file_list)
+
+    rows = np.array(fds.get_stations('1900-01-01T00:00:00', '2100-01-01T00:00:00'))
+
+    location_set = set()
+    for n, s in rows[:, 0:2]:
+        locs = fds.get_location_codes(n, s)
+        for loc in locs: location_set.add(loc)
+    # end for
+
+    # There is 1 unique location code in the h5 file
+    assert len(location_set) == 1
+# end func
+
def test_get_coordinates():
    fds = FederatedASDFDataSet(asdf_file_list)

@@ -121,7 +136,7 @@ def test_get_global_time_range():
# end func


-def test_get_stations():
+def test_stations_iterator():
    fds = FederatedASDFDataSet(asdf_file_list)

    local_netsta_list = list(fds.stations_iterator())
