Skip to content

Commit

Permalink
Merge branch 'eqt_docs' into develop
Browse files Browse the repository at this point in the history
  • Loading branch information
geojunky committed Jun 7, 2023
2 parents 6b49fcb + f71db21 commit 3b45c28
Show file tree
Hide file tree
Showing 28 changed files with 242 additions and 433 deletions.
20 changes: 1 addition & 19 deletions iloc_rstt/iloc_phase_ident.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,25 +25,7 @@
#import subprocess
import subprocess32 as subprocess
import psutil

def split_list(lst, npartitions):
    """Split *lst* into *npartitions* lists of near-equal length.

    Each partition first receives ``len(lst) // npartitions`` consecutive
    items; the remaining ``len(lst) % npartitions`` items are then handed
    out one apiece to the leading partitions.

    Note: the previous implementation sized partitions with
    ``np.arange(np.divide(len(lst), npartitions))``; under true division
    that yields a float upper bound, so arange over-counts and the loop
    raises IndexError whenever len(lst) is not an exact multiple of
    npartitions. Integer division below restores the intended behaviour.

    :param lst: sequence to partition
    :param npartitions: number of partitions to create
    :return: list of npartitions lists
    """
    nbase, nextra = divmod(len(lst), npartitions)

    result = [[] for _ in range(npartitions)]
    count = 0
    for iproc in range(npartitions):
        for _ in range(nbase):
            result[iproc].append(lst[count])
            count += 1
        # end for
    # end for
    for iproc in range(nextra):
        result[iproc].append(lst[count])
        count += 1
    # end for

    return result
# end func
from seismic.misc import split_list

def kill(proc_pid):
process = psutil.Process(proc_pid)
Expand Down
28 changes: 1 addition & 27 deletions iloc_rstt/sc3_extract_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,36 +30,10 @@
from obspy.geodetics.base import gps2dist_azimuth, kilometers2degrees
import MySQLdb
import os
from seismic.misc import split_list, recursive_glob

DEVNULL = open(os.devnull, 'wb')

def split_list(lst, npartitions):
    """Split *lst* into *npartitions* lists of near-equal length.

    Each partition is first filled with ``len(lst) // npartitions``
    consecutive items, after which the ``len(lst) % npartitions``
    leftover items are distributed one each to the first partitions.

    Note: the previous implementation used
    ``np.arange(np.divide(len(lst), npartitions))``; true division gives
    a float upper bound, so arange produces one extra index and the loop
    raises IndexError whenever len(lst) is not an exact multiple of
    npartitions. Integer division below fixes that while keeping the
    intended fill order.

    :param lst: sequence to partition
    :param npartitions: number of partitions to create
    :return: list of npartitions lists
    """
    nbase, nextra = divmod(len(lst), npartitions)

    result = [[] for _ in range(npartitions)]
    count = 0
    for iproc in range(npartitions):
        for _ in range(nbase):
            result[iproc].append(lst[count])
            count += 1
        # end for
    # end for
    for iproc in range(nextra):
        result[iproc].append(lst[count])
        count += 1
    # end for

    return result
# end func

def recursive_glob(treeroot, pattern):
    """Walk *treeroot* recursively and return the paths of every file whose
    basename matches the glob *pattern*; paths are prefixed by the walked
    directory, in os.walk order."""
    return [os.path.join(base, fname)
            for base, _dirs, files in os.walk(treeroot)
            for fname in fnmatch.filter(files, pattern)]
# end func

def runprocess(cmd, get_results=False):
results = []
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=DEVNULL)
Expand Down
16 changes: 2 additions & 14 deletions iloc_rstt/xmlevents_to_ascii.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,19 +28,7 @@
from obspy import read_events, read_inventory
from obspy.geodetics.base import gps2dist_azimuth, kilometers2degrees
import os

def split_list(lst, npartitions):
    """Split *lst* into *npartitions* contiguous chunks of near-equal size.

    The first ``len(lst) % npartitions`` chunks receive one extra element.

    Fix: the original used Python-2-only ``xrange()``, which raises
    NameError on Python 3; ``range()`` is the direct equivalent here.

    :param lst: sequence to partition
    :param npartitions: number of chunks to produce
    :return: list of npartitions slices of lst
    """
    k, m = divmod(len(lst), npartitions)
    return [lst[i * k + min(i, m):(i + 1) * k + min(i + 1, m)] for i in range(npartitions)]
# end func

def recursive_glob(treeroot, pattern):
    """Recursively collect all files under *treeroot* whose basenames match
    the glob *pattern*; returned paths carry the walked-directory prefix."""
    matches = []
    for dirpath, _subdirs, filenames in os.walk(treeroot):
        for fname in fnmatch.filter(filenames, pattern):
            matches.append(os.path.join(dirpath, fname))
        # end for
    # end for
    return matches
# end func
from seismic.misc import split_list, recursive_glob

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

Expand Down Expand Up @@ -179,7 +167,7 @@ def process(data_path, inventory_file, scratch_path, output_file_stem):
lineout = ' '.join(line[1::2]).format(*line[::2])
sprocfile.write(lineout + '\n')
# end for
if (len(notFound)): print 'Rank: %d'%(rank), notFound
if (len(notFound)): print('Rank: %d'%(rank), notFound)
# end for

pprocfile.close()
Expand Down
3 changes: 1 addition & 2 deletions seismic/ASDFdatabase/FederatedASDFDataSet.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,7 @@
from scipy.spatial import cKDTree

from seismic.ASDFdatabase._FederatedASDFDataSetImpl import _FederatedASDFDataSetImpl
from seismic.ASDFdatabase.utils import rtp2xyz

from seismic.misc import rtp2xyz

class FederatedASDFDataSet():
def __init__(self, asdf_source, logger=None, single_item_read_limit_in_mb=1024):
Expand Down
6 changes: 1 addition & 5 deletions seismic/ASDFdatabase/_FederatedASDFDataSetImpl.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import hashlib
from functools import partial
from seismic.ASDFdatabase.utils import MIN_DATE, MAX_DATE
from seismic.misc import split_list
import pickle as cPickle
import pandas as pd
from rtree import index
Expand All @@ -51,11 +52,6 @@ def setup_logger(name, log_file, level=logging.INFO):
return logger
# end func

def split_list(lst, npartitions):
    """Partition *lst* into *npartitions* contiguous chunks of near-equal
    length; the leading ``len(lst) % npartitions`` chunks hold one extra item."""
    base, extra = divmod(len(lst), npartitions)
    chunks = []
    start = 0
    for part in range(npartitions):
        stop = start + base + (1 if part < extra else 0)
        chunks.append(lst[start:stop])
        start = stop
    # end for
    return chunks
# end func

def split_list_by_timespan(l, n):
lmin = np.min(l[:, 1])
lmax = np.max(l[:, 1])
Expand Down
6 changes: 1 addition & 5 deletions seismic/ASDFdatabase/asdf2event_mseed.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,16 +17,12 @@
import os, sys
import re
from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet
from seismic.misc import split_list
from obspy import Stream, UTCDateTime, read_events
from obspy.geodetics.base import locations2degrees
from obspy.taup import TauPyModel
import click

def split_list(lst, npartitions):
    """Split *lst* into *npartitions* contiguous, near-equal chunks; the
    first ``len(lst) % npartitions`` chunks are one element longer."""
    size, extra = divmod(len(lst), npartitions)
    # Left edge of chunk j: j full sizes plus one per extra item absorbed so far.
    bounds = [j * size + min(j, extra) for j in range(npartitions + 1)]
    return [lst[bounds[j]:bounds[j + 1]] for j in range(npartitions)]
# end func

def dump_traces(fds, events_xml, sn_list, start_date, end_date, min_dist, max_dist,
time_before_p, time_after_p, output_folder):
"""
Expand Down
6 changes: 1 addition & 5 deletions seismic/ASDFdatabase/asdf2mseed.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,11 +24,7 @@
import pyasdf
from obspy.core.trace import Trace
import click

def split_list(lst, npartitions):
    """Divide *lst* into *npartitions* consecutive chunks whose lengths
    differ by at most one; the longer chunks come first."""
    quot, rem = divmod(len(lst), npartitions)
    sizes = [quot + 1] * rem + [quot] * (npartitions - rem)
    offsets = [0]
    for s in sizes:
        offsets.append(offsets[-1] + s)
    # end for
    return [lst[offsets[i]:offsets[i + 1]] for i in range(npartitions)]
# end func
from seismic.misc import split_list

def dump_traces(ds, sn_list, start_date, end_date, length, min_length_sec, output_folder):
"""
Expand Down
6 changes: 1 addition & 5 deletions seismic/ASDFdatabase/asdf_preprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,7 @@
from obspy.core.util.misc import get_window_times
import gc
from obspy.core.util.misc import limit_numpy_fft_cache

def split_list(lst, npartitions):
    """Chop *lst* into *npartitions* contiguous pieces of near-equal size;
    the remainder is absorbed one element at a time by the leading pieces."""
    total = len(lst)
    pieces = []
    lo = 0
    for idx in range(npartitions):
        hi = lo + total // npartitions + (1 if idx < total % npartitions else 0)
        pieces.append(lst[lo:hi])
        lo = hi
    # end for
    return pieces
# end func
from seismic.misc import split_list

def getStationInventory(master_inventory, inventory_cache, netsta):
netstaInv = None
Expand Down
11 changes: 1 addition & 10 deletions seismic/ASDFdatabase/cwb2asdf/cwb2asdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,7 @@
from obspy.core import Stream
from ordered_set import OrderedSet as set
from tqdm import tqdm


def split_list(lst, npartitions):
    """Return *lst* split into *npartitions* contiguous sublists; sizes
    differ by at most one, with the larger sublists first."""
    base, leftover = divmod(len(lst), npartitions)
    out = []
    pos = 0
    idx = 0
    while idx < npartitions:
        step = base + 1 if idx < leftover else base
        out.append(lst[pos:pos + step])
        pos += step
        idx += 1
    # end while
    return out
# end func
from seismic.misc import split_list

def make_ASDF_tag(tr, tag):
# def make_ASDF_tag(ri, tag):
Expand All @@ -47,8 +40,6 @@ def make_ASDF_tag(tr, tag):
end=tr.stats.endtime.strftime("%Y-%m-%dT%H:%M:%S"),
tag=tag)
return data_name


# end func

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
Expand Down
56 changes: 2 additions & 54 deletions seismic/ASDFdatabase/cwb2asdf/demultiplex.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@
import os
import random
import subprocess

from seismic.misc_p import ProgressTracker
from seismic.misc import split_list
import click
from mpi4py import MPI


def runprocess(cmd, get_results=False):
results = []
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
Expand All @@ -35,61 +35,9 @@ def runprocess(cmd, get_results=False):
p.wait()

return p.returncode, results


# end func

def split_list(lst, npartitions):
    """Split *lst* into *npartitions* contiguous runs of near-equal length;
    the first ``len(lst) % npartitions`` runs get one extra element."""
    width, surplus = divmod(len(lst), npartitions)

    def cut(j):
        # Left edge of run j: j full widths plus one per surplus item absorbed.
        return j * width + min(j, surplus)
    # end func

    return [lst[cut(j):cut(j + 1)] for j in range(npartitions)]
# end func

class ProgressTracker:
    # Persists a per-MPI-rank progress counter to disk so that an
    # interrupted run can be restarted and already-completed work skipped.
    # NOTE(review): indentation reconstructed from a flattened scrape —
    # verify against the original file.
    def __init__(self, output_folder, restart_mode=False):
        """
        :param output_folder: folder where the per-rank progress file is kept
        :param restart_mode: when True, resume from an existing progress file;
                             raises Exception if that file is missing
        """
        self.output_folder = output_folder
        self.restart_mode = restart_mode

        self.comm = MPI.COMM_WORLD
        self.nproc = self.comm.Get_size()
        self.rank = self.comm.Get_rank()

        self.prev_progress = 0 # progress from a previous run
        self.progress = 0
        # One progress file per rank, e.g. prog.0.txt, prog.1.txt, ...
        self.proc_fn = os.path.join(output_folder, 'prog.%d.txt' % (self.rank))

        if (self.restart_mode):
            if (not os.path.exists(self.proc_fn)):
                raise Exception('Progress file (%s) not found' % (self.proc_fn))
            # end if

            # File holds a single integer: the count reached last run.
            self.prev_progress = int(open(self.proc_fn).read())
        # end if

    # end func

    def increment(self):
        # Returns False while replaying items already completed in a previous
        # run (restart_mode); True once new territory is reached, after
        # persisting the updated counter.
        self.progress += 1
        if (self.restart_mode and (self.prev_progress > 0) and (self.progress < self.prev_progress)):
            return False
        else:
            # Write to a temp file then rename, so a crash mid-write cannot
            # leave a truncated/corrupt progress file behind.
            tmpfn = self.proc_fn + '.tmp'
            f = open(tmpfn, 'w+')
            f.write(str(self.progress))
            f.close()
            os.rename(tmpfn, self.proc_fn)

            return True
        # end if
    # end func


# end class

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])


@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument('input-folder', required=True,
type=click.Path(exists=True))
Expand Down
10 changes: 1 addition & 9 deletions seismic/ASDFdatabase/plot_data_quality.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,18 +46,10 @@
from tqdm import tqdm

from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet
from seismic.misc import split_list

logging.basicConfig()


def split_list(lst, npartitions):
    """Break *lst* into *npartitions* adjacent slices whose lengths differ
    by at most one; longer slices precede shorter ones."""
    per_part, remainder = divmod(len(lst), npartitions)
    result = []
    begin = 0
    for i in range(npartitions):
        end = begin + per_part + (1 if i < remainder else 0)
        result.append(lst[begin:end])
        begin = end
    # end for
    return result
# end func


def setup_logger(name, log_file, level=logging.INFO):
"""
Function to setup a logger; adapted from stackoverflow
Expand Down
17 changes: 2 additions & 15 deletions seismic/ASDFdatabase/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,25 +9,12 @@
import os
from tqdm import tqdm
from ordered_set import OrderedSet as set
from seismic.misc import split_list

MAX_DATE = UTCDateTime(4102444800.0)
MIN_DATE = UTCDateTime(-2208988800.0)

def rtp2xyz(r, theta, phi):
    """Convert spherical coordinates (r, theta, phi) to Cartesian (x, y, z).

    theta is the polar angle measured from the +z axis (z = r*cos(theta))
    and phi the azimuth in the x-y plane; all three inputs are 1-D arrays
    of equal length. Returns an (N, 3) array with x, y, z as columns.
    """
    r_sin_theta = r * np.sin(theta)
    return np.column_stack((r_sin_theta * np.cos(phi),
                            r_sin_theta * np.sin(phi),
                            r * np.cos(theta)))
# end func

class MseedIndex:
def split_list(lst, npartitions):
k, m = divmod(len(lst), npartitions)
return [lst[i * k + min(i, m):(i + 1) * k + min(i + 1, m)] for i in range(npartitions)]
# end func

def __init__(self, mseed_folder, pattern):
self.mseed_folder = mseed_folder
self.comm = MPI.COMM_WORLD
Expand All @@ -42,7 +29,7 @@ def __init__(self, mseed_folder, pattern):

#self.mseed_files = self.mseed_files[:1000]

work_load = MseedIndex.split_list(self.mseed_files, self.nproc)
work_load = split_list(self.mseed_files, self.nproc)
counts = np.array([len(item) for item in work_load])
offsets = np.append(0, np.cumsum(counts[:-1]))
# end if
Expand Down
6 changes: 1 addition & 5 deletions seismic/hvsr/hvsr.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from seismic.receiver_fn.rf_plot_utils import pdf_merge
from tqdm import tqdm
from shutil import rmtree
from seismic.misc import split_list

def generate_master_curve(station:str, output_path:str,
sm:SpooledMatrix, hvsr_freq:np.ndarray,
Expand Down Expand Up @@ -360,11 +361,6 @@ def process(asdf_source, network, spec_method, output_path, win_length,
print(stations)
print("")

def split_list(lst, npartitions):
k, m = divmod(len(lst), npartitions)
return [lst[i * k + min(i, m):(i + 1) * k + min(i + 1, m)] for i in range(npartitions)]
# end func

proc_stations = split_list(stations, nproc)
# end if

Expand Down
20 changes: 1 addition & 19 deletions seismic/inventory/dataio/catalogcsv.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,25 +14,7 @@
from tqdm.auto import tqdm

from seismic.inventory.dataio.event_attrs import Origin, Event, Magnitude, Arrival


def recursive_glob(treeroot, pattern):
    """
    Generate a complete list of files matching pattern under the root of a
    directory hierarchy.

    :param treeroot: Path to the root of the directory tree.
    :type treeroot: str or pathlib.Path
    :param pattern: File name pattern to match, e.g. "\*.csv"
    :type pattern: str
    :return: List of paths to the files matching the pattern, qualified
        relative to treeroot
    :rtype: list(str)
    """
    return [os.path.join(dirpath, name)
            for dirpath, _dirs, files in os.walk(treeroot)
            for name in fnmatch.filter(files, pattern)]

from seismic.misc import recursive_glob

class CatalogCSV:
"""
Expand Down

0 comments on commit 3b45c28

Please sign in to comment.