
Commit

Added support for focal mechanism angles at command line, and started working on code to support retrieval of MT info from USGS ComCat
mhearne-usgs committed Apr 8, 2013
1 parent 45c8067 commit a6d6248
Showing 6 changed files with 222 additions and 9 deletions.
4 changes: 4 additions & 0 deletions .gitignore
@@ -0,0 +1,4 @@
*.py[cod]
*.grd
*.db
run*
43 changes: 42 additions & 1 deletion strec.py
@@ -6,9 +6,31 @@
import ConfigParser
import getpass
import datetime
import math

from strec.gmpe import GMPESelector
import strec.utils
from strec import cmt

def getPlungeValues(strike,dip,rake,mag):
    #scalar moment in dyne-cm (Hanks & Kanamori relation)
    mom = 10**((mag*1.5)+16.1)
    d2r = math.pi/180.0

    #double-couple moment tensor components from strike, dip and rake
    mrr=mom*math.sin(2*dip*d2r)*math.sin(rake*d2r)
    mtt=-mom*((math.sin(dip*d2r)*math.cos(rake*d2r)*math.sin(2*strike*d2r))+(math.sin(2*dip*d2r)*math.sin(rake*d2r)*(math.sin(strike*d2r)*math.sin(strike*d2r))))
    mpp=mom*((math.sin(dip*d2r)*math.cos(rake*d2r)*math.sin(2*strike*d2r))-(math.sin(2*dip*d2r)*math.sin(rake*d2r)*(math.cos(strike*d2r)*math.cos(strike*d2r))))
    mrt=-mom*((math.cos(dip*d2r)*math.cos(rake*d2r)*math.cos(strike*d2r))+(math.cos(2*dip*d2r)*math.sin(rake*d2r)*math.sin(strike*d2r)))
    mrp=mom*((math.cos(dip*d2r)*math.cos(rake*d2r)*math.sin(strike*d2r))-(math.cos(2*dip*d2r)*math.sin(rake*d2r)*math.cos(strike*d2r)))
    mtp=-mom*((math.sin(dip*d2r)*math.cos(rake*d2r)*math.cos(2*strike*d2r))+(0.5*math.sin(2*dip*d2r)*math.sin(rake*d2r)*math.sin(2*strike*d2r)))

    #decompose the tensor into T, N, P axes and the two nodal planes
    plungetuple = cmt.compToAxes(mrr,mtt,mpp,mrt,mrp,mtp)
    plungevals = {}
    plungevals['T'] = plungetuple[0].copy()
    plungevals['N'] = plungetuple[1].copy()
    plungevals['P'] = plungetuple[2].copy()
    plungevals['NP1'] = plungetuple[3].copy()
    plungevals['NP2'] = plungetuple[4].copy()
    return plungevals
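Aside (not part of the diff): the component formulas above appear to be the standard Aki & Richards double-couple relations, with the scalar moment in dyne-cm from the Hanks & Kanamori formula, so the resulting tensor should be purely deviatoric. A minimal standalone sketch that checks the zero-trace property, using made-up angles:

import math

def dc_diagonal(strike,dip,rake,mag):
    #same mrr/mtt/mpp expressions as getPlungeValues() above
    mom = 10**((mag*1.5)+16.1)
    d2r = math.pi/180.0
    s,d,r = strike*d2r,dip*d2r,rake*d2r
    mrr = mom*math.sin(2*d)*math.sin(r)
    mtt = -mom*(math.sin(d)*math.cos(r)*math.sin(2*s)+math.sin(2*d)*math.sin(r)*math.sin(s)**2)
    mpp = mom*(math.sin(d)*math.cos(r)*math.sin(2*s)-math.sin(2*d)*math.sin(r)*math.cos(s)**2)
    return mrr,mtt,mpp

mrr,mtt,mpp = dc_diagonal(230.0,77.0,-160.0,6.5) #hypothetical strike, dip, rake, magnitude
print abs(mrr+mtt+mpp)/10**((6.5*1.5)+16.1)      #trace/M0 should be ~0 (deviatoric tensor)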

if __name__ == '__main__':

@@ -26,6 +48,9 @@
parser.add_option("-d", "--datafile",dest="datafile",
metavar="DATAFILE",
help="Specify the database (.db) file containing moment tensor solutions.")
parser.add_option("-a", "--angles",dest="angles",
metavar="ANGLES",
help='Specify the focal mechanism by providing "strike dip rake"')
parser.add_option("-c", "--csv-out",
action="store_true", dest="outputCSV", default=False,
help="print output as csv")
@@ -80,14 +105,30 @@
    etime = None
    forceComposite = True

    if options.angles is not None:
        parts = options.angles.split()
        try:
            strike = float(parts[0])
            dip = float(parts[1])
            rake = float(parts[2])
            plungevals = getPlungeValues(strike,dip,rake,magnitude)
        except:
            print 'Could not parse Strike, Dip and Rake from "%s"' % options.angles
            sys.exit(1)
    else:
        plungevals = None

    if options.forceComposite:
        forceComposite = True
        plungevals = None

    homedir = os.path.dirname(os.path.abspath(__file__)) #where is this script?
    zoneconfigfile = os.path.join(homedir,'strec.ini')
    gs = GMPESelector(zoneconfigfile,datafile,homedir,datafolder)

    strecresults = gs.selectGMPE(lat,lon,depth,magnitude,date=etime,
                                 forceComposite=forceComposite,
                                 plungevals=plungevals)

    if options.outputCSV:
        strecresults.renderCSV(sys.stdout)
4 changes: 3 additions & 1 deletion strec/gmpe.py
@@ -267,7 +267,9 @@ def selectGMPE(self,lat,lon,depth,magnitude,date=None,forceComposite=False,plung
                plungevals,fmwarning,mtsource = self.getFocMechAxes(lat,lon,depth,magnitude)
            else:
                plungevals,fmwarning,mtsource = self.getFocMechAxes(lat,lon,depth,magnitude,date=date)

        else:
            mtsource = 'input'
            fmwarning = ''
        if plungevals is None:
            plungevals = {'T':{},'N':{},'P':{},'NP1':{},'NP2':{}}
            plungevals['T']['plunge'] = numpy.nan
161 changes: 156 additions & 5 deletions strec/mtreader.py
@@ -1,15 +1,24 @@
#!/usr/bin/env python

#stdlib imports
import re
import datetime
import pytz
import math
from xml.dom.minidom import parse
import os.path
import sqlite3
import urllib,urllib2
import json
import sys
import copy

#local imports
from cmt import compToAxes


class ReaderError(Exception):
    """Used to indicate errors in the reader classes."""

def appendDataFile(infile,dbfile,filetype,dtype,hasHeader=False):
    if filetype == 'ndk':
@@ -84,6 +93,135 @@ def __init__(self,mtfile,type=None):
    def generateRecords(self,startDate=None,enddate=None,hasHeader=False):
        pass

class ComCatReader(MTReader):
    URLBASE = 'http://comcat.cr.usgs.gov/earthquakes/feed/search.php?%s'
    def __init__(self):
        pass

    def getEvents(self,pdict):
        params = urllib.urlencode(pdict)
        searchurl = self.URLBASE % params
        datadict = None
        try:
            fh = urllib2.urlopen(searchurl)
            data = fh.read()
            #response is JSONP - strip the "callback(" prefix and ");" suffix before parsing
            data2 = data[len(pdict['callback'])+1:-2]
            datadict = json.loads(data2)
            fh.close()
        except Exception,errorobj:
            raise errorobj
        return datadict['features']
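Aside (not part of the diff): the search endpoint replies with JSONP, i.e. the JSON payload wrapped in the requested callback, which is why getEvents() slices off len(callback)+1 characters at the front and the trailing ");" before calling json.loads(). A tiny illustration with a made-up response string:

import json

callback = 'comsearch'
raw = 'comsearch({"type": "FeatureCollection", "features": []});'  #hypothetical JSONP response
payload = raw[len(callback)+1:-2]   #strip 'comsearch(' and ');'
print json.loads(payload)['features']   #prints []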

    def generateRecords(self,startdate=None,enddate=None,hasHeader=False):
        utc = pytz.UTC
        if startdate is None:
            startdate = datetime.datetime(1971,1,1)
        startdate = utc.localize(startdate)
        if enddate is None:
            enddate = datetime.datetime.utcnow() #default to the present, not 1971
        enddate = utc.localize(enddate)
        if (enddate-startdate).days < 365:
            mintime = int(startdate.strftime('%s'))*1000
            maxtime = int(enddate.strftime('%s'))*1000
            params = {'productType[]':'moment-tensor,',
                      'minEventTime':mintime,
                      'maxEventTime':maxtime,
                      'callback':'comsearch'}
            events = self.getEvents(params)
        else:
            #break long searches into year-long windows
            ndays = (enddate-startdate).days
            nyears = int(math.ceil(ndays/365.0))
            events = []
            mindate = startdate
            maxdate = mindate + datetime.timedelta(days=365)
            for i in range(0,nyears):
                mintime = int(mindate.strftime('%s'))*1000
                maxtime = int(maxdate.strftime('%s'))*1000
                params = {'productType[]':'moment-tensor,',
                          'minEventTime':mintime,
                          'maxEventTime':maxtime,
                          'callback':'comsearch'}
                events = events + self.getEvents(params)
                mindate = maxdate
                maxdate = mindate + datetime.timedelta(days=365)
        for event in events:
            etimesecs = int(event['properties']['time'])/1000
            etime = utc.localize(datetime.datetime.utcfromtimestamp(etimesecs)) #localize so it compares with startdate/enddate
            if etime < startdate or etime > enddate:
                continue
            edicts = self.getMomentDicts(event)
            for edict in edicts:
                yield edict
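Aside (not part of the diff): the minEventTime/maxEventTime values built above rely on strftime('%s'), which is a platform-specific extension (absent on Windows, and tied to the local time zone). A portable sketch of the same epoch-millisecond conversion for a UTC date, using calendar.timegm():

import calendar,datetime

d = datetime.datetime(2013,4,1)                  #interpreted as UTC
millis = calendar.timegm(d.timetuple())*1000     #epoch milliseconds, as ComCat expects
print millis                                     #prints 1364774400000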

    def getMomentDicts(self,event):
        eventdicts = []
        datadict = {}
        url = event['properties']['url'] + '.json'
        try:
            fh = urllib2.urlopen(url)
            data = fh.read()
            datadict = json.loads(data)
            fh.close()
        except Exception,errorobj:
            raise errorobj
        eqtime = datadict['summary']['time']
        eqid = str(datetime.datetime.utcfromtimestamp(int(eqtime)/1000))
        lat = float(datadict['summary']['latitude'])
        lon = float(datadict['summary']['longitude'])
        depth = float(datadict['summary']['depth'])
        mag = float(datadict['summary']['magnitude'])
        for mt in datadict['products']['moment-tensor']:
            if mt['status'] == 'DELETE':
                continue
            bsource = mt['properties']['beachball-source']
            bcode = mt['code']
            #we have gcmt data going back further, directly from GCMT website
            if bsource == 'LD' and bcode.find('GCMT') > -1:
                continue

            mttype = mt['properties']['beachball-type']
            try:
                mrr = float(mt['properties']['tensor-mrr'])
                mtt = float(mt['properties']['tensor-mtt'])
                mpp = float(mt['properties']['tensor-mpp'])
                mrt = float(mt['properties']['tensor-mrt'])
                mrp = float(mt['properties']['tensor-mrp'])
                mtp = float(mt['properties']['tensor-mtp'])
            except:
                continue
            np1strike = float(mt['properties']['nodal-plane-1-strike'])
            np2strike = float(mt['properties']['nodal-plane-2-strike'])
            np1dip = float(mt['properties']['nodal-plane-1-dip'])
            np2dip = float(mt['properties']['nodal-plane-2-dip'])
            try:
                np1rake = float(mt['properties']['nodal-plane-1-rake'])
            except:
                np1rake = float(mt['properties']['nodal-plane-1-slip'])
            try:
                np2rake = float(mt['properties']['nodal-plane-2-rake'])
            except:
                np2rake = float(mt['properties']['nodal-plane-2-slip'])
            try:
                T,N,P,NP1,NP2 = compToAxes(mrr,mtt,mpp,mrt,mrp,mtp)
            except Exception,exc:
                continue #skip solutions whose tensor cannot be decomposed; 'pass' here would leave T,N,P undefined
            tplunge = T['plunge']
            tazimuth = T['azimuth']
            nplunge = N['plunge']
            nazimuth = N['azimuth']
            pplunge = P['plunge']
            pazimuth = P['azimuth']
            eventdicts.append({'id':eqid,'type':mttype,
                               'lat':lat,'lon':lon,
                               'depth':depth,'mag':mag,
                               'mrr':mrr,'mtt':mtt,
                               'mpp':mpp,'mrt':mrt,
                               'mrp':mrp,'mtp':mtp,
                               'tplunge':tplunge,'tazimuth':tazimuth,
                               'nplunge':nplunge,'nazimuth':nazimuth,
                               'pplunge':pplunge,'pazimuth':pazimuth})
        return eventdicts
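Aside (not part of the diff): each dictionary appended above is a flat record, one per non-GCMT moment-tensor product; a hypothetical example, with invented values, just to show the shape:

record = {'id':'2013-04-06 04:42:35','type':'Mwc',          #all values invented for illustration
          'lat':-3.5,'lon':138.5,'depth':66.0,'mag':7.0,
          'mrr':1.1e+26,'mtt':-0.3e+26,'mpp':-0.8e+26,
          'mrt':0.4e+26,'mrp':-1.2e+26,'mtp':0.2e+26,
          'tplunge':60.0,'tazimuth':10.0,
          'nplunge':25.0,'nazimuth':120.0,
          'pplunge':10.0,'pazimuth':215.0}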

class CSVReader(MTReader):
    idfmtsec = '%Y%m%d%H%M%S'
    idfmtmin = '%Y%m%d%H%M'
@@ -102,7 +240,7 @@ def generateRecords(self,startdate=None,enddate=None,hasHeader=False):
                else:
                    yield self.readline(line)
            except Exception,msg:
                raise ReaderError,'Error reading data from line:\n"%s".\nIs this a header row?' % line.strip()

        self.fh.close()

@@ -409,10 +547,23 @@ def generateRecords(self,startdate=None,enddate=None):
        self.dom.unlink()

if __name__ == '__main__':
    #dump all moment tensor records in ComCat since 1976 as a quick test
    comreader = ComCatReader()
    d1 = datetime.datetime(1976,1,1,0,0)
    #d2 = datetime.datetime.now()
    #d2 = datetime.datetime(2002,10,1)
    d2 = datetime.datetime.now()
    nc = 0
    for record in comreader.generateRecords(startdate=d1,enddate=d2):
        print '%s,%.1f,%s' % (record['id'],record['mag'],record['type'])
        sys.stdout.flush()
        nc += 1
    print 'There are %i moment tensor records in ComCat' % nc
    sys.exit(0) #the reader tests below are unreachable while the ComCat test is active
    xreader = QuakeMLReader('xmlsample.xml',type='GCMT')
    for record in xreader.generateRecords():
        print '%s,%.1f,%s' % (record['id'],record['mag'],record['type'])

    creader = CSVReader('csvtest.csv','User')
    for record in creader.generateRecords():
        print record['id'],record['mag'],record['type']
12 changes: 10 additions & 2 deletions strec_convert.py
@@ -82,6 +82,14 @@

if os.path.isfile(outfile):
    print '%s already exists - appending new data.' % outfile
    try:
        appendDataFile(infile,outfile,filetype,options.fmtype,hasHeader=options.hasheader)
    except Exception,msg:
        print 'Error reading input file %s.\n%s' % (infile,msg)
        sys.exit(1)
else:
    try:
        createDataFile(infile,outfile,filetype,options.fmtype,hasHeader=options.hasheader)
    except Exception,msg:
        print 'Error reading input file %s.\n"%s"' % (infile,msg)
        sys.exit(1)
7 changes: 7 additions & 0 deletions strec_init.py
@@ -128,6 +128,9 @@ def fetchSlabs(datafolder):
parser.add_option("-g", "--gcmt",
action="store_true", dest="getGCMT", default=False,
help="Download all GCMT moment tensor data")
parser.add_option("-c", "--comcat",
action="store_true", dest="getComCat", default=False,
help="Download all USGS ComCat moment tensor data (sans GCMT)")
parser.add_option("-n", "--noslab",
action="store_true", dest="noSlab", default=False,
help="Do NOT download slab data")
@@ -196,6 +199,9 @@ def fetchSlabs(datafolder):
    createDataFile(histfile,outfile,'ndk','gcmt',hasHeader=False)
    print 'Finished converting historical GCMT data.'
    os.remove(histfile)
if options.getComCat:
    pass #ComCat retrieval is not implemented yet - per the commit message, this work is just getting started

if options.update:
    mostrecent = getMostRecent(outfile)
    ryear = mostrecent.year
@@ -226,6 +232,7 @@ def fetchSlabs(datafolder):
mostrecent = getMostRecent(outfile)
print 'GCMT database contains events through %s.' % str(mostrecent)
sys.exit(0)




