More Realistic Estimate of Cross-correlation Pairs
* The correlator script now factors in temporal overlap when
  determining the list of potential station pairs
* Added a simple script to export station coordinates in CSV and KML
  formats
geojunky committed Jun 16, 2023
1 parent 3b45c28 commit eac50e4
Showing 2 changed files with 168 additions and 11 deletions.
129 changes: 129 additions & 0 deletions seismic/ASDFdatabase/export_station_locations.py
@@ -0,0 +1,129 @@
#!/usr/bin/env python
"""
Description:
    Export station locations in CSV or KML format.
References:
CreationDate: 16/06/23
Developer: rakib.hassan@ga.gov.au
Revision History:
LastUpdate: 16/06/23 RH
LastUpdate: dd/mm/yyyy Who Optional description
"""

import numpy as np
from obspy import UTCDateTime
import click
from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument('asdf-source', required=True,
type=click.Path(exists=True))
@click.argument('output-filename', required=True,
type=click.Path(dir_okay=False))
@click.option('--output-format', type=click.Choice(['csv', 'kml']),
default='csv', show_default=True,
help='Output format')
@click.option('--network', type=str, default=None, show_default=True,
help="Network name")
@click.option('--station', type=str, default=None, show_default=True,
help="Station name")
@click.option('--location', type=str, default=None, show_default=True,
help="Location name")
@click.option('--channel', type=str, default=None, show_default=True,
help="Channel name")
@click.option('--start-date', default='1900-01-01',
help="Start date-time in UTC format",
type=str, show_default=True)
@click.option('--end-date', default='2100-01-01',
help="End date-time in UTC format.",
type=str, show_default=True)
def process(asdf_source, output_filename, output_format, network, station, location, channel,
start_date, end_date):
"""
ASDF_SOURCE: Path to text file containing paths to ASDF files\n
OUTPUT_FILENAME: Output file-name\n
Example usage:
"""

    def write_kml(rows, ofn):
        if not ofn.endswith('.kml'): ofn += '.kml'
        fh = open(ofn, "w")
        fh.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        fh.write('<kml xmlns="http://www.opengis.net/kml/2.2" '
                 'xmlns:gx="http://www.google.com/kml/ext/2.2">\n')
        fh.write("<Document>\n")
        for row in rows:
            fh.write("\t<Placemark>\n")
            fh.write("\t\t<name>" + '.'.join([row[0], row[1]]) + "</name>\n")
            fh.write("\t\t<Point>\n")
            # note: KML expects coordinates in lon,lat order
            fh.write("\t\t\t<coordinates>" + ','.join([str(row[2]), str(row[3])]) + "</coordinates>\n")
            fh.write("\t\t</Point>\n")
            fh.write("\t</Placemark>\n")
        # end for
        fh.write("</Document>\n")
        fh.write("</kml>\n")
        fh.close()
    # end func

    def write_csv(rows, ofn):
        if not ofn.endswith('.csv'): ofn += '.csv'
        np.savetxt(ofn, rows, header='net, sta, lon, lat',
                   fmt='%s, %s, %f, %f')
    # end func

start_date_ts = None
end_date_ts = None
try:
start_date_ts = UTCDateTime(start_date).timestamp if start_date else None
end_date_ts = UTCDateTime(end_date).timestamp if end_date else None
except Exception as e:
print(str(e))
assert 0, 'Invalid input'
# end try

ds = FederatedASDFDataSet(asdf_source)

query = 'select ns.net, ns.sta, ns.lon, ns.lat from netsta as ns, wdb as wdb ' \
'where ns.net=wdb.net and ns.sta=wdb.sta '

if (network):
query += ' and ns.net="{}" '.format(network)
# end if

if (station):
query += ' and ns.sta="{}" '.format(station)
# end if

if (location):
query += ' and wdb.loc="{}" '.format(location)
# end if

if (channel):
query += ' and wdb.cha="{}" '.format(channel)
# end if

if (start_date_ts and end_date_ts):
query += ' and wdb.st>={} and wdb.et<={}'.format(start_date_ts, end_date_ts)
# end if

query += ' group by ns.net, ns.sta'

# fetch rows
rows = ds.fds.conn.execute(query).fetchall()
array_dtype = [('net', 'U10'), ('sta', 'U10'),
('lon', 'float'), ('lat', 'float')]
rows = np.array(rows, dtype=array_dtype)

if(output_format == 'kml'): write_kml(rows, output_filename)
else: write_csv(rows, output_filename)
# end func

if (__name__ == '__main__'):
process()
# end if
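For context, a minimal sketch of how the new exporter might be exercised through click's test runner; the ASDF source list (asdf_files.txt), the output name and the network code 7X are hypothetical placeholders, and the same arguments can be passed directly on the command line.

# Minimal sketch: drive the new click command via click's test runner.
# File names and the network code below are hypothetical placeholders.
from click.testing import CliRunner
from seismic.ASDFdatabase.export_station_locations import process

runner = CliRunner()
result = runner.invoke(process, ['asdf_files.txt',         # text file listing ASDF volumes
                                 'stations',               # output name; extension appended if missing
                                 '--output-format', 'kml',
                                 '--network', '7X'])       # optional network filter
print(result.exit_code)
print(result.output)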
50 changes: 39 additions & 11 deletions seismic/xcorqc/correlator.py
@@ -99,22 +99,22 @@ def get_closest_stations(self, netsta, other_dataset, nn=1):

def get_unique_station_pairs(self, other_dataset, nn=1):
pairs = set()
-        for st1 in self.netsta_list:
-            st2list = None
+        for ns1 in self.netsta_list:
+            ns2list = None
            if (nn != -1):
                if self == other_dataset:
-                    st2list = set(self.get_closest_stations(st1, other_dataset, nn=nn + 1))
-                    if st1 in st2list:
-                        st2list.remove(st1)
-                    st2list = list(st2list)
+                    ns2list = set(self.get_closest_stations(ns1, other_dataset, nn=nn + 1))
+                    if ns1 in ns2list:
+                        ns2list.remove(ns1)
+                    ns2list = list(ns2list)
                else:
-                    st2list = self.get_closest_stations(st1, other_dataset, nn=nn)
+                    ns2list = self.get_closest_stations(ns1, other_dataset, nn=nn)
            else:
-                st2list = other_dataset.netsta_list
+                ns2list = other_dataset.netsta_list
            # end if

-            for st2 in st2list:
-                pairs.add((st1, st2))
+            for ns2 in ns2list:
+                pairs.add((ns1, ns2))
# end for
# end for

@@ -128,7 +128,35 @@ def get_unique_station_pairs(self, other_dataset, nn=1):
# end if
# end if

-        return list(pairs_subset)
+        # cull pairs based on temporal overlap (note: gaps are not considered)
+        result_pairs = set()
+        range_cache = defaultdict(tuple)
+        for ns1, ns2 in pairs_subset:
+            st1 = et1 = st2 = et2 = None
+
+            if(ns1 in range_cache.keys()):
+                st1, et1 = range_cache[ns1]
+            else:
+                net1, sta1 = ns1.split('.')
+                st1, et1 = self.fds.get_global_time_range(net1, sta1)
+                range_cache[ns1] = (st1, et1)
+            # end if
+
+            if(ns2 in range_cache.keys()):
+                st2, et2 = range_cache[ns2]
+            else:
+                net2, sta2 = ns2.split('.')
+                st2, et2 = other_dataset.fds.get_global_time_range(net2, sta2)
+                range_cache[ns2] = (st2, et2)
+            # end if
+
+            # check for temporal overlap
+            if((st1 <= et2) and (st2 <= et1)):
+                result_pairs.add((ns1, ns2))
+            # end if
+        # end for
+
+        return list(result_pairs)
# end func
# end class

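For clarity, a self-contained sketch of the interval test that the new culling step applies to each candidate pair; plain years stand in for the UTC timestamps returned by get_global_time_range(), and the function name is illustrative only.

# Minimal sketch of the test behind the temporal-overlap culling: a station
# pair survives only if the recording windows [st1, et1] and [st2, et2]
# intersect (data gaps within each window are ignored).
def have_temporal_overlap(st1, et1, st2, et2):
    # Two closed intervals overlap iff each one starts before the other ends.
    return (st1 <= et2) and (st2 <= et1)

# Illustrative epochs (years used in place of UTC timestamps):
print(have_temporal_overlap(2010, 2015, 2014, 2018))  # True  -> pair retained
print(have_temporal_overlap(2010, 2012, 2016, 2018))  # False -> pair culled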
