Merge pull request #81 from duncanmmacleod/gwpy-0.7
Update trigger reading syntax for gwpy>=0.7
Duncan Macleod committed Mar 14, 2018
2 parents 9306572 + 0040a4d commit f5c6ec2
Showing 7 changed files with 93 additions and 95 deletions.
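In short: gwpy 0.7 replaced the table-specific LIGO_LW readers (format='ligolw.sngl_burst') with a single 'ligolw' format plus an explicit tablename keyword, and the sngl_burst reader now uses a 'peak' time column, which is what the configuration and reading changes below track. A minimal sketch of the two call styles with gwpy's EventTable (the file name here is hypothetical):

    from gwpy.table import EventTable

    # gwpy < 0.7: the table name was baked into the format string
    # events = EventTable.read('H1-OMICRON-TRIGGERS.xml.gz',
    #                          format='ligolw.sngl_burst',
    #                          columns=['time', 'peak_frequency', 'snr'])

    # gwpy >= 0.7: generic 'ligolw' format plus an explicit tablename,
    # with 'peak' replacing 'time' as the sngl_burst time column
    events = EventTable.read('H1-OMICRON-TRIGGERS.xml.gz',  # hypothetical file
                             format='ligolw',
                             tablename='sngl_burst',
                             columns=['peak', 'peak_frequency', 'snr'])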
46 changes: 9 additions & 37 deletions .travis.yml
@@ -1,57 +1,30 @@
language: python

addons:
apt:
sources:
- sourceline: deb http://software.ligo.org/lscsoft/debian wheezy contrib
key_url: http://software.ligo.org/keys/deb/lscsoft.key
- sourceline: deb-src http://software.ligo.org/lscsoft/debian wheezy contrib
key_url: http://software.ligo.org/keys/deb/lscsoft.key
packages:
- gfortran # scipy
- libblas-dev # scipy
- liblapack-dev # scipy
- pkg-config # lal
- zlib1g-dev # lal
- libgsl0-dev # lal
- swig # lal
- bc # lal
- libfftw3-dev # lal

python:
- '2.6'
- '2.7'
- '3.5'
- '3.6'

env:
global:
- LAL_VERSION="6.18.0"
matrix:
- PIP_FLAGS="--quiet"
- PIP_FLAGS="--quiet --pre"

matrix:
exclude:
- python: '2.6'
env: PIP_FLAGS="--quiet --pre"
allow_failures:
- python: '2.6'
- python: '3.5'
- python: '3.6'
fast_finish: true

before_install:
- pip install -q --upgrade pip
# install specific packages for python2.6
- if [[ ${TRAVIS_PYTHON_VERSION} == '2.6' ]]; then pip install "scipy<0.18" "astropy<1.2" "matplotlib<2.0"; fi
- pip install ${PRE} -r requirements.txt
- .travis/build-lal.sh
- python -m pip install -q --upgrade pip
- python -m pip install lalsuite # to get LAL
- python -m pip install ${PIP_FLAGS} -r requirements.txt

install:
# note: need --editable for coverage with `which ...` to work
- pip install --editable .
- python -m pip install --editable .

script:
- pip install ${PRE} unittest2 coveralls "pytest>=2.8"
- python -m pip install ${PIP_FLAGS} unittest2 coveralls "pytest>=2.8"
- coverage run ./setup.py test
- coverage run --append `which hveto` --help
- coverage run --append `which hveto-cache-events` --help
@@ -61,7 +34,6 @@ after_success:
- coveralls

cache:
apt: true
pip: true
directories:
- lal-${LAL_VERSION}
before_cache:
- rm -f $HOME/.cache/pip/log/debug.log
93 changes: 56 additions & 37 deletions bin/hveto-cache-events
@@ -30,15 +30,17 @@ import os
import warnings
import multiprocessing

from glue.lal import (Cache, CacheEntry)
from lal.utils import CacheEntry

from glue.lal import Cache
from glue.ligolw.ligolw import (Document, LIGO_LW, LIGOLWContentHandler)
from glue.ligolw.lsctables import ProcessTable
from glue.ligolw.utils import (write_filename as write_ligolw,
load_filename as load_ligolw)
from glue.ligolw.utils.process import (register_to_xmldoc as
append_process_table)

from gwpy.table.lsctables import (SnglBurstTable, SnglInspiralTable,
New as new_table)
from gwpy.io import ligolw as io_ligolw
from gwpy.time import to_gps
from gwpy.segments import (Segment, SegmentList,
DataQualityFlag, DataQualityDict)
@@ -47,10 +49,12 @@ from hveto import (__version__, log, config)
from hveto.triggers import (get_triggers, find_auxiliary_channels,
find_trigger_files)

IFO = os.getenv('IFO')

__author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'

Cache.entry_class = CacheEntry # remove deprecationwarning

IFO = os.getenv('IFO')

logger = log.Logger('hveto-cache-events')


@@ -99,10 +103,20 @@ pout.add_argument('-o', '--output-directory', default=os.curdir,
args = parser.parse_args()

ifo = args.ifo
start = args.gpsstart.seconds
end = args.gpsend.seconds
start = int(args.gpsstart)
end = int(args.gpsend)
duration = end - start

# format process params for LIGO_LW
procparams = {k.replace('_', '-'): v for k, v in vars(args).items() if v}
for gpskey in ('gpsstart', 'gpsend'):
procparams[gpskey] = int(procparams[gpskey])
for listkey in ('config-file', 'primary-cache', 'auxiliary-cache'):
try:
procparams[listkey] = ','.join(procparams[listkey])
except KeyError:
pass

logger.info("-- Welcome to Hveto --")
logger.info("GPS start time: %d" % start)
logger.info("GPS end time: %d" % end)
@@ -164,8 +178,8 @@ def create_filename(channel):
'%s-%s-%d-%d.xml.gz' % (ifo, name, start, duration))


def read_and_cache_events(channel, etg, cache=None, trigfindkwargs={},
**getkwargs):
def read_and_cache_events(channel, etg, cache=None, trigfind_kw={},
**read_kw):
cfile = create_filename(channel)
# read existing cached triggers and work out new segments to query
if args.append and os.path.isfile(cfile):
@@ -175,7 +189,7 @@ def read_and_cache_events(channel, etg, cache=None, trigfindkwargs={},
new = analysis.copy()
# get cache of files
if cache is None:
cache = find_trigger_files(channel, etg, new.active, **trigfindkwargs)
cache = find_trigger_files(channel, etg, new.active, **trigfind_kw)
else:
cache = cache.sieve(segmentlist=new.active)
# restrict 'active' segments to when we have data
@@ -186,14 +200,14 @@ def read_and_cache_events(channel, etg, cache=None, trigfindkwargs={},
# find new triggers
try:
trigs = get_triggers(channel, auxetg, new.active, cache=cache,
raw=True, **getkwargs)
raw=True, **read_kw)
# catch error and continue
except ValueError as e:
warnings.warn('%s: %s' % (type(e).__name__, str(e)))
else:
a = write_events(channel, trigs, new)
try:
return CacheEntry.from_T050017(a), trigs.size
return CacheEntry.from_T050017(a), len(trigs)
except TypeError: # None
return

@@ -203,36 +217,35 @@ def write_events(channel, tab, segments):
"""
# get filename
filename = create_filename(channel)
# if empty, skip
if tab.shape[0] == 0 and args.append and os.path.isfile(filename):
return filename
elif tab.shape[0] == 0:
return
# get table class and convert back to LIGO_LW table object
if 'peak_time' in tab.dtype.fields:
Table = SnglBurstTable
else:
Table = SnglInspiralTable
llwtable = Table.from_recarray(tab)

# read existing document
if args.append and os.path.isfile(filename):
xmldoc = load_ligolw(filename, gz=True,
contenthandler=LIGOLWContentHandler)
xmldoc = io_ligolw.read_ligolw(filename)
# or, create document
else:
xmldoc = Document()
xmldoc.appendChild(LIGO_LW())

# append process table
append_process_table(xmldoc, os.path.basename(__file__), {})
with multiprocessing.Lock():
ProcessTable.next_id = type(ProcessTable.next_id)(0)
process = append_process_table(xmldoc, os.path.basename(__file__),
procparams)

# append segment tables
segments.write(xmldoc, format='ligolw')
# append event table
try:
etable = Table.get_table(xmldoc)
except ValueError:
etable = new_table(Table, columns=tab.dtype.fields)
xmldoc.childNodes[-1].appendChild(etable)
etable.extend(llwtable)
segments.write(xmldoc, format='ligolw', append=True,
attrs={'process_id': process.process_id})
except TypeError as exc:
if 'process_id' in str(exc):
segments.write(xmldoc, format='ligolw', append=True)
else:
raise

# append event table
if len(tab):
tab.write(xmldoc, append=True)

# write file to disk
write_ligolw(xmldoc, filename, gz=True)
return filename
@@ -306,12 +319,13 @@ else:
petg = cp.get('primary', 'trigger-generator')
psnr = cp.getfloat('primary', 'snr-threshold')
pfreq = cp.getfloats('primary', 'frequency-range')
ptrigfind = dict((key.split('-', 1)[1], val) for (key, val) in
cp.items('primary') if key.startswith('trigfind-'))
preadkw = cp.getparams('primary', 'read-')
ptrigfindkw = cp.getparams('primary', 'trigfind-')

# load primary triggers
out = read_and_cache_events(pchannel, petg, snr=psnr, frange=pfreq,
cache=pcache, trigfindkwargs=ptrigfind)
cache=pcache, trigfind_kw=ptrigfindkw,
**preadkw)
try:
e, n = out
except TypeError:
@@ -339,6 +353,10 @@ logger.info('Primary cache written to %s' % pname)
logger.info("Reading triggers for aux channels...")
counter = multiprocessing.Value('i', 0)

areadkw = cp.getparams('auxiliary', 'read-')
atrigfindkw = cp.getparams('auxiliary', 'trigfind-')


def read_and_write_aux_triggers(channel):
if acache is None:
auxcache = None
Expand All @@ -347,7 +365,8 @@ def read_and_write_aux_triggers(channel):
desc = name.replace('-', '_')
auxcache = acache.sieve(ifos=ifo, description='%s*' % desc)
out = read_and_cache_events(channel, auxetg, cache=auxcache, snr=minsnr,
frange=auxfreq)
frange=auxfreq, trigfind_kw=atrigfindkw,
**areadkw)
try:
e, n = out
except TypeError:
19 changes: 12 additions & 7 deletions hveto/config.py
@@ -86,9 +86,10 @@
; flow, fhigh
frequency-range = 0, 2048.
; format
read-format = ligolw.sngl_burst
read-format = ligolw
read-tablename = sngl_burst
; read-columns to read
read-columns = time, peak_frequency, snr
read-columns = peak, peak_frequency, snr
.. note::
@@ -113,7 +114,8 @@
trigger-generator = daily-cbc
trigfind-run = bbh_gds
trigfind-filetag = 16SEC_CLUSTERED
read-format = ligolw.sngl_inspiral
read-format = ligolw
read-tablename = sngl_inspiral
read-columns = end,template_duration,snr
[auxiliary]
@@ -138,9 +140,10 @@
; flow, fhigh
frequency-range = 0, 2048
; file format
read-format = ligolw.sngl_burst
read-format = ligolw
read-tablename = sngl_burst
; read-columns to read
read-columns = time, peak_frequency, snr
read-columns = peak, peak_frequency, snr
; give tab-indented, line-separated list of channels
channels =
%(IFO)s:ASC-AS_B_RF45_I_PIT_OUT_DQ
@@ -227,12 +230,14 @@ class HvetoConfigParser(configparser.ConfigParser):
'trigger-generator': 'Omicron',
'snr-threshold': 8,
'frequency-range': (30, 2048),
'read-format': 'ligolw.sngl_burst',
'read-format': 'ligolw',
'read-tablename': 'sngl_burst',
},
'auxiliary': {
'trigger-generator': 'Omicron',
'frequency-range': (30, 2048),
'read-format': 'ligolw.sngl_burst',
'read-format': 'ligolw',
'read-tablename': 'sngl_burst',
},
'safety': {
'unsafe-channels': ['%(IFO)s:GDS-CALIB_STRAIN',
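For reference, the new read-tablename option rides on hveto's existing convention that any 'read-'-prefixed key in a section is stripped of its prefix and forwarded to the table reader (judging from cp.getparams('primary', 'read-') in the hveto-cache-events diff above, then **read_kw into get_triggers and EventTable.read). A rough sketch of that mapping, spelled out by hand rather than through the HvetoConfigParser helper:

    # [primary] options as written in the updated default configuration
    primary_options = {
        'read-format': 'ligolw',
        'read-tablename': 'sngl_burst',
        'read-columns': 'peak, peak_frequency, snr',
    }

    # strip the 'read-' prefix to build the keyword arguments that end up
    # in EventTable.read via get_triggers
    read_kw = {key[len('read-'):]: value
               for key, value in primary_options.items()}
    read_kw['columns'] = [name.strip() for name in read_kw['columns'].split(',')]
    # -> {'format': 'ligolw', 'tablename': 'sngl_burst',
    #     'columns': ['peak', 'peak_frequency', 'snr']}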
3 changes: 2 additions & 1 deletion hveto/plot.py
@@ -27,9 +27,10 @@

from lxml import etree

from matplotlib import rcParams
from matplotlib.colors import LogNorm

from gwpy.plotter import (rcParams, HistogramPlot, EventTablePlot,
from gwpy.plotter import (HistogramPlot, EventTablePlot,
TimeSeriesPlot, Plot)
from gwpy.plotter.table import get_column_string

17 changes: 9 additions & 8 deletions hveto/triggers.py
@@ -41,6 +41,9 @@
except ImportError:
from gwpy.table.io import trigfind

# Table metadata keys to keep
TABLE_META = ('tablename',)

# -- utilities ----------------------------------------------------------------

COLUMN_LABEL = {
@@ -96,8 +99,8 @@ def find_trigger_files(channel, etg, segments, **kwargs):
cache = Cache()
for start, end in segments:
try:
cache.extend(trigfind.find_trigger_urls(channel, etg, start,
end, **kwargs))
cache.extend(trigfind.find_trigger_files(channel, etg, start,
end, **kwargs))
except ValueError as e:
if str(e).lower().startswith('no channel-level directory'):
warnings.warn(str(e))
@@ -183,12 +186,10 @@ def get_triggers(channel, etg, segments, cache=None, snr=None, frange=None,
read_kwargs[key] = [x.strip(' ') for x in
read_kwargs[key].split(',')]

# set default columns for sngl_burst table (Omicron)
if read_kwargs.get('format', '') == 'ligolw.sngl_burst':
if read_kwargs.get('format', None) == 'ligolw':
read_kwargs.setdefault('use_numpy_dtypes', True)
if read_kwargs.get('tablename', None) == 'sngl_burst':
read_kwargs.setdefault('columns', ['peak', 'peak_frequency', 'snr'])
read_kwargs.setdefault('ligolw_columns', ['peak_time', 'peak_time_ns',
'peak_frequency', 'snr'])
read_kwargs.setdefault('get_as_columns', True)

# hacky fix for reading ASCII
# astropy's ASCII reader uses `include_names` and not `columns`
@@ -216,7 +217,7 @@
if len(segcache) == 1: # just pass the single filename
segcache = segcache[0].path
new = EventTable.read(segcache, **read_kwargs)
new.meta = {} # we never need the metadata
new.meta = {k: new.meta[k] for k in TABLE_META if new.meta.get(k)}
if outofbounds:
new = new[new[new.dtype.names[0]].in_segmentlist(segaslist)]
tables.append(new)
3 changes: 2 additions & 1 deletion requirements.txt
@@ -4,8 +4,9 @@ scipy
gitpython
jinja2
pykerberos
https://github.com/ligovirgo/trigfind/archive/v0.3.tar.gz
https://github.com/ligovirgo/trigfind/archive/v0.6.1.tar.gz
lscsoft-glue
dqsegdb
gwpy
git+https://github.com/ligovirgo/gwdetchar.git
lxml