diff --git a/bin/minifollowups/pycbc_foreground_minifollowup b/bin/minifollowups/pycbc_foreground_minifollowup index 4d9a19d277e..eb710deef19 100644 --- a/bin/minifollowups/pycbc_foreground_minifollowup +++ b/bin/minifollowups/pycbc_foreground_minifollowup @@ -16,7 +16,14 @@ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ Followup foreground events """ -import os, sys, argparse, logging, re, h5py, pycbc.workflow as wf +import os +import sys +import argparse +import logging +import re +import h5py + +import pycbc.workflow as wf from pycbc.results import layout from pycbc.types import MultiDetOptionAction from pycbc.events import select_segments_by_definer, coinc @@ -108,26 +115,34 @@ if args.sort_variable not in f[file_val]: raise KeyError(f'Sort variable {args.sort_variable} not in {file_val}: sort' f'choices in {file_val} are ' + ', '.join(all_datasets)) +# In case we are doing background minifollowup with repeated events, +# we must include the ordering / template / trigger / time info for +# _many_ more events to make sure we get enough events_to_read = num_events * 100 +# We've asked for more events than there are! 
if len(stat) < num_events: num_events = len(stat) if len(stat) < events_to_read: events_to_read = len(stat) +# Get the indices of the events we are considering in the order specified sorting = f[file_val + '/' + args.sort_variable][:].argsort() if args.sort_order == 'descending': sorting = sorting[::-1] event_idx = sorting[0:events_to_read] stat = stat[event_idx] +# Save the time / trigger / template ids for the events times = {} tids = {} -# Version used for multi-ifo coinc code ifo_list = f.attrs['ifos'].split(' ') +f_cat = f[file_val] for ifo in ifo_list: - times[ifo] = f[f'{file_val}/{ifo}/time'][:][event_idx] - tids[ifo] = f[f'{file_val}/{ifo}/trigger_id'][:][event_idx] + times[ifo] = f_cat[ifo]['time'][:][event_idx] + tids[ifo] = f_cat[ifo]['trigger_id'][:][event_idx] +bank_ids = f_cat['template_id'][:][event_idx] +f.close() bank_data = h5py.File(args.bank_file, 'r') @@ -187,7 +202,7 @@ while event_count < num_events and curr_idx < (events_to_read - 1): tags=[f'SKIP_{event_count}']),) skipped_data = [] - bank_id = f[f'{file_val}/template_id'][:][sorting][curr_idx] + bank_id = bank_ids[curr_idx] layouts += (mini.make_coinc_info(workflow, single_triggers, tmpltbank_file, coinc_file, args.output_dir, n_loudest=curr_idx, @@ -197,40 +212,33 @@ while event_count < num_events and curr_idx < (events_to_read - 1): ifo_times, args.output_dir, special_tids=ifo_tids, tags=args.tags + [str(event_count)]) - params = {} - for ifo in times: - params['%s_end_time' % ifo] = times[ifo][curr_idx] - try: - # Only present for precessing case - params['u_vals_%s' % ifo] = \ - fsdt[ifo][ifo]['u_vals'][tids[ifo][curr_idx]] - except: - pass - - params['mass1'] = bank_data['mass1'][bank_id] - params['mass2'] = bank_data['mass2'][bank_id] - params['spin1z'] = bank_data['spin1z'][bank_id] - params['spin2z'] = bank_data['spin2z'][bank_id] - params['f_lower'] = bank_data['f_lower'][bank_id] - # don't require precessing template info if not present - try: - params['spin1x'] = 
bank_data['spin1x'][bank_id] - params['spin1y'] = bank_data['spin1y'][bank_id] - params['spin2x'] = bank_data['spin2x'][bank_id] - params['spin2y'] = bank_data['spin2y'][bank_id] - params['inclination'] = bank_data['inclination'][bank_id] - except KeyError: - pass - - files += mini.make_single_template_plots(workflow, insp_segs, - args.inspiral_data_read_name, - args.inspiral_data_analyzed_name, params, - args.output_dir, - tags=args.tags + [str(event_count)]) + params = mini.get_single_template_params( + curr_idx, + times, + bank_data, + bank_ids[curr_idx], + fsdt, + tids + ) + + _, sngl_tmplt_plots = mini.make_single_template_plots( + workflow, + insp_segs, + args.inspiral_data_read_name, + args.inspiral_data_analyzed_name, + params, + args.output_dir, + data_segments=insp_data_seglists, + tags=args.tags + [str(event_count)] + ) + files += sngl_tmplt_plots + for single in single_triggers: time = times[single.ifo][curr_idx] if time==-1: + # If this detector did not trigger, still make the plot, but use + # the average time of detectors which did trigger time = coinc.mean_if_greater_than_zero([times[sngl.ifo][curr_idx] for sngl in single_triggers])[0] for seg in insp_analysed_seglists[single.ifo]: diff --git a/bin/minifollowups/pycbc_injection_minifollowup b/bin/minifollowups/pycbc_injection_minifollowup index d16e7c1cd71..37754cf6a20 100644 --- a/bin/minifollowups/pycbc_injection_minifollowup +++ b/bin/minifollowups/pycbc_injection_minifollowup @@ -217,6 +217,8 @@ for num_event in range(num_events): ifo_times += ' %s:%s ' % (ifo, ifo_time) inj_params[ifo + '_end_time'] = ifo_time + all_times = [inj_params[sngl.ifo + '_end_time'] for sngl in single_triggers] + inj_params['mean_time'] = coinc.mean_if_greater_than_zero(all_times)[0] layouts += [(mini.make_inj_info(workflow, injection_file, injection_index, num_event, args.output_dir, tags=args.tags + [str(num_event)])[0],)] @@ -256,8 +258,7 @@ for num_event in range(num_events): for single in single_triggers: 
checkedtime = time if (inj_params[single.ifo + '_end_time'] == -1.0): - all_times = [inj_params[sngl.ifo + '_end_time'] for sngl in single_triggers] - checkedtime = coinc.mean_if_greater_than_zero(all_times)[0] + checkedtime = inj_params['mean_time'] for seg in insp_analysed_seglists[single.ifo]: if checkedtime in seg: files += mini.make_singles_timefreq(workflow, single, tmpltbank_file, @@ -275,7 +276,7 @@ for num_event in range(num_events): 'skipping singles plots'.format(checkedtime, single.ifo)) - files += mini.make_single_template_plots(workflow, insp_segs, + _, norm_plot = mini.make_single_template_plots(workflow, insp_segs, args.inspiral_data_read_name, args.inspiral_data_analyzed_name, inj_params, args.output_dir, inj_file=injection_xml_file, @@ -283,8 +284,9 @@ for num_event in range(num_events): params_str='injection parameters as template, ' +\ 'here the injection is made as normal', use_exact_inj_params=True) + files += norm_plot - files += mini.make_single_template_plots(workflow, insp_segs, + _, inv_plot = mini.make_single_template_plots(workflow, insp_segs, args.inspiral_data_read_name, args.inspiral_data_analyzed_name, inj_params, args.output_dir, inj_file=injection_xml_file, @@ -293,8 +295,9 @@ for num_event in range(num_events): params_str='injection parameters as template, ' +\ 'here the injection is made inverted', use_exact_inj_params=True) + files += inv_plot - files += mini.make_single_template_plots(workflow, insp_segs, + _, noinj_plot = mini.make_single_template_plots(workflow, insp_segs, args.inspiral_data_read_name, args.inspiral_data_analyzed_name, inj_params, args.output_dir, inj_file=injection_xml_file, @@ -303,6 +306,7 @@ for num_event in range(num_events): params_str='injection parameters, here no ' +\ 'injection was actually performed', use_exact_inj_params=True) + files += noinj_plot for curr_ifo in args.single_detector_triggers: single_fname = args.single_detector_triggers[curr_ifo] @@ -341,12 +345,13 @@ for num_event in 
range(num_events): curr_tags = ['TMPLT_PARAMS_%s' %(curr_ifo,)] curr_tags += [str(num_event)] - files += mini.make_single_template_plots(workflow, insp_segs, + _, loudest_plot = mini.make_single_template_plots(workflow, insp_segs, args.inspiral_data_read_name, args.inspiral_data_analyzed_name, curr_params, args.output_dir, inj_file=injection_xml_file, tags=args.tags + curr_tags, params_str='loudest template in %s' % curr_ifo ) + files += loudest_plot layouts += list(layout.grouper(files, 2)) num_event += 1 diff --git a/bin/minifollowups/pycbc_sngl_minifollowup b/bin/minifollowups/pycbc_sngl_minifollowup index 01fbc743e83..4005802d08d 100644 --- a/bin/minifollowups/pycbc_sngl_minifollowup +++ b/bin/minifollowups/pycbc_sngl_minifollowup @@ -307,6 +307,7 @@ for rank, num_event in enumerate(order): curr_params['spin2z'] = trigs.spin2z[num_event] curr_params['f_lower'] = trigs.f_lower[num_event] curr_params[args.instrument + '_end_time'] = time + curr_params['mean_time'] = time # don't require precessing template info if not present try: curr_params['spin1x'] = trigs.spin1x[num_event] @@ -322,12 +323,16 @@ for rank, num_event in enumerate(order): except: pass - files += mini.make_single_template_plots(workflow, insp_segs, + _, sngl_plot = mini.make_single_template_plots(workflow, insp_segs, args.inspiral_data_read_name, - args.inspiral_data_analyzed_name, curr_params, + args.inspiral_data_analyzed_name, + curr_params, args.output_dir, + data_segments={args.instrument : insp_data_seglists}, tags=args.tags+[str(rank)]) + files += sngl_plot + files += mini.make_plot_waveform_plot(workflow, curr_params, args.output_dir, [args.instrument], tags=args.tags + [str(rank)]) diff --git a/bin/minifollowups/pycbc_upload_prep_minifollowup b/bin/minifollowups/pycbc_upload_prep_minifollowup new file mode 100644 index 00000000000..1aec8de3025 --- /dev/null +++ b/bin/minifollowups/pycbc_upload_prep_minifollowup @@ -0,0 +1,200 @@ +#!/bin/env python +# Copyright (C) 2015-2023 Alexander 
Harvey Nitz, Gareth Cabourn Davies +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 3 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +""" Prepare files for upload to GraceDB for foreground events +""" +import os +import sys +import argparse +import logging +import re +import h5py +import numpy as np + +from ligo import segments + +import pycbc.workflow as wf +from pycbc.results import layout +from pycbc.types import MultiDetOptionAction +from pycbc.events import select_segments_by_definer, coinc +from pycbc.io import get_all_subkeys +import pycbc.workflow.minifollowups as mini +from pycbc.workflow.core import resolve_url_to_file, resolve_td_option +import pycbc.version + +parser = argparse.ArgumentParser(description=__doc__[1:]) +parser.add_argument('--verbose', action='count', + help='Add progressively more verbose output, ' + 'default=info') +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) +parser.add_argument('--bank-file', + help="HDF format template bank file") +parser.add_argument('--statmap-file', + help="HDF format clustered coincident trigger result file") +parser.add_argument('--xml-all-file', + help="XML format result file containing all events") +parser.add_argument('--single-detector-triggers', nargs='+', action=MultiDetOptionAction, + help="HDF format merged single detector trigger files") 
+parser.add_argument('--inspiral-segments', + help="xml segment files containing the inspiral analysis times") +parser.add_argument('--inspiral-data-read-name', + help="Name of inspiral segmentlist containing data read in " + "by each analysis job.") +parser.add_argument('--inspiral-data-analyzed-name', + help="Name of inspiral segmentlist containing data " + "analyzed by each analysis job.") +parser.add_argument('--psd-files', nargs='+', action=MultiDetOptionAction, + help="HDF format merged single detector PSD files") +parser.add_argument('--ifar-thresh', type=float, + help="IFAR threshold for preparing SNR timeseries " + "files for upload. Default=No upload prep") + +wf.add_workflow_command_line_group(parser) +wf.add_workflow_settings_cli(parser, include_subdax_opts=True) +args = parser.parse_args() + +if args.verbose: + args.verbose += 1 +else: + args.verbose = 1 +pycbc.init_logging(args.verbose) + +workflow = wf.Workflow(args) + +wf.makedir(args.output_dir) + +channel_opts = {} +for ifo in workflow.ifos: + channel_opts[ifo] = workflow.cp.get_opt_tags( + "workflow", + "%s-channel-name" % ifo.lower(), + "" + ) + +# create a FileList that will contain all output files +layouts = [] + +tmpltbank_file = resolve_url_to_file(os.path.abspath(args.bank_file)) +insp_segs = resolve_url_to_file(os.path.abspath(args.inspiral_segments)) +xml_all = resolve_url_to_file(os.path.abspath(args.xml_all_file)) + +single_triggers = [] +psd_files = [] +fsdt = {} +insp_data_seglists = {} +insp_analysed_seglists = {} +for ifo in args.single_detector_triggers: + strig_fname = args.single_detector_triggers[ifo] + strig_file = resolve_url_to_file(os.path.abspath(strig_fname), + attrs={'ifos': ifo}) + single_triggers.append(strig_file) + + psd_fname = args.psd_files[ifo] + psd_file = resolve_url_to_file(os.path.abspath(psd_fname), + attrs={'ifos': ifo}) + psd_files.append(psd_file) + + fsdt[ifo] = h5py.File(args.single_detector_triggers[ifo], 'r') + insp_data_seglists[ifo] = 
select_segments_by_definer( + args.inspiral_segments, + segment_name=args.inspiral_data_read_name, + ifo=ifo) + insp_analysed_seglists[ifo] = select_segments_by_definer( + args.inspiral_segments, + segment_name=args.inspiral_data_analyzed_name, + ifo=ifo) + insp_data_seglists[ifo].coalesce() + insp_analysed_seglists[ifo].coalesce() + +f = h5py.File(args.statmap_file, 'r') +stat = f['foreground/stat'][:] + +bank_data = h5py.File(args.bank_file, 'r') + +ifar_limit = args.ifar_thresh +# Get indices of all events which pass the IFAR threshold +event_ifars = f['foreground/ifar'][:] +events_to_read = np.count_nonzero(event_ifars > ifar_limit) +# Sort by IFAR, descending +event_idx = event_ifars.argsort()[::-1][:events_to_read] +# Times and tids need to be reset for this set of events: +times = {} +tids = {} +bank_ids = {} + +ifo_list = f.attrs['ifos'].split(' ') +for ifo in ifo_list: + times[ifo] = f[f'foreground/{ifo}/time'][:][event_idx] + tids[ifo] = f[f'foreground/{ifo}/trigger_id'][:][event_idx] +bank_ids = f['foreground/template_id'][:][event_idx] + +for curr_idx in range(event_idx.size): + params = mini.get_single_template_params( + curr_idx, + times, + bank_data, + bank_ids[curr_idx], + fsdt, + tids + ) + + # Extract approximant + try: + appx = params.pop('approximant') + except KeyError: + # approximant not stored in params, use default + appx = None + + channel_name = "" + for ifo in ifo_list: + ifo_chname = resolve_td_option( + channel_opts[ifo], + segments.segment(params['mean_time'], params['mean_time']) + ) + channel_name += ifo_chname + " " + + single_temp_files = [] + for ifo in ifo_list: + if params['mean_time'] not in insp_analysed_seglists[ifo]: + logging.info("Mean time %.3f not in segment list", + params['mean_time']) + continue + # Make single-template files to put into the XML file for upload + single_temp_files += mini.make_single_template_files( + workflow, + insp_segs, + ifo, + args.inspiral_data_read_name, + args.inspiral_data_analyzed_name, + 
params, + args.output_dir, + store_file=True, + tags=args.tags+['upload', str(curr_idx)], + ) + + mini.make_upload_files( + workflow, + psd_files, + single_temp_files, + xml_all, + curr_idx, + appx, + args.output_dir, + channel_name, + tags=args.tags+['upload', str(curr_idx)] + ) + +workflow.save() diff --git a/bin/workflows/pycbc_make_offline_search_workflow b/bin/workflows/pycbc_make_offline_search_workflow index 14f0a9bcaa1..611520c5c69 100755 --- a/bin/workflows/pycbc_make_offline_search_workflow +++ b/bin/workflows/pycbc_make_offline_search_workflow @@ -480,6 +480,17 @@ table = wf.make_foreground_table(workflow, combined_bg_file, fore_xmlall = wf.make_foreground_table(workflow, combined_bg_file, hdfbank, rdir['open_box_result'], singles=insps, extension='.xml', tags=["xmlall"]) + +if workflow.cp.has_option_tags('workflow-minifollowups', + 'prepare-gracedb-uploads', ''): + # Need to get absolute path here + upload_path = os.path.join(workflow.out_dir, 'upload_data') + wf.setup_upload_prep_minifollowups(workflow, combined_bg_file, fore_xmlall, + full_insps, psd_files, hdfbank, insp_files_seg_file, + data_analysed_name, trig_generated_name, + 'daxes', upload_path, + tags=combined_bg_file.tags) + fore_xmlloudest = wf.make_foreground_table(workflow, combined_bg_file, hdfbank, rdir['open_box_result'], singles=insps, extension='.xml', tags=["xmlloudest"]) diff --git a/pycbc/workflow/minifollowups.py b/pycbc/workflow/minifollowups.py index 5e59533da5d..947742d9ad3 100644 --- a/pycbc/workflow/minifollowups.py +++ b/pycbc/workflow/minifollowups.py @@ -16,6 +16,7 @@ import logging, os.path from ligo import segments +from pycbc.events import coinc from pycbc.workflow.core import Executable, FileList from pycbc.workflow.core import makedir, resolve_url_to_file from pycbc.workflow.plotting import PlotExecutable, requirestr, excludestr @@ -35,7 +36,8 @@ def grouper(iterable, n, fillvalue=None): def setup_foreground_minifollowups(workflow, coinc_file, single_triggers, 
tmpltbank_file, insp_segs, insp_data_name, - insp_anal_name, dax_output, out_dir, tags=None): + insp_anal_name, dax_output, out_dir, + tags=None): """ Create plots that followup the Nth loudest coincident injection from a statmap produced HDF file. @@ -56,6 +58,8 @@ def setup_foreground_minifollowups(workflow, coinc_file, single_triggers, The name of the segmentlist storing data read. insp_anal_name: str The name of the segmentlist storing data analyzed. + dax_output : directory + Location of the dax outputs out_dir: path The directory to store minifollowups result plots and files tags: {None, optional} @@ -114,7 +118,8 @@ def setup_foreground_minifollowups(workflow, coinc_file, single_triggers, # determine if a staging site has been specified job = SubWorkflow(fil.name, is_planned=False) - input_files = [tmpltbank_file, coinc_file, insp_segs] + single_triggers + input_files = [tmpltbank_file, coinc_file, insp_segs] + \ + single_triggers job.add_inputs(*input_files) job.set_subworkflow_properties(map_file, staging_site=workflow.staging_site, @@ -344,10 +349,185 @@ class PlotQScanExecutable(PlotExecutable): time_dependent_options = ['--channel-name', '--frame-type'] +def get_single_template_params(curr_idx, times, bank_data, + bank_id, fsdt, tids): + """ + A function to get the parameters needed for the make_single_template_files + function. 
+ + Parameters + ---------- + curr_idx : int + The index of the event in the file + times : dictionary keyed on IFO of numpy arrays, dtype float + The array of trigger times for each detector + bank_data : dictionary or h5py file + Structure containing the bank information + bank_id : int + The template index within the bank + fsdt : dictionary of h5py files, keyed on IFO + The single-detector TRIGGER_MERGE files, keyed by IFO + tids : dictionary keyed on IFO of numpy arrays, dtype int + The trigger indexes in fsdt for each IFO + + Returns + ------- + params : dictionary + A dictionary containing the parameters needed for the event used + + """ + params = {} + for ifo in times: + params['%s_end_time' % ifo] = times[ifo][curr_idx] + try: + # Only present for precessing, so may not exist + params['u_vals_%s' % ifo] = \ + fsdt[ifo][ifo]['u_vals'][tids[ifo][curr_idx]] + except: + pass + + params['mean_time'] = coinc.mean_if_greater_than_zero( + [times[ifo][curr_idx] for ifo in times] + )[0] + + params['mass1'] = bank_data['mass1'][bank_id] + params['mass2'] = bank_data['mass2'][bank_id] + params['spin1z'] = bank_data['spin1z'][bank_id] + params['spin2z'] = bank_data['spin2z'][bank_id] + params['f_lower'] = bank_data['f_lower'][bank_id] + if 'approximant' in bank_data: + params['approximant'] = bank_data['approximant'][bank_id] + # don't require precessing template info if not present + try: + params['spin1x'] = bank_data['spin1x'][bank_id] + params['spin1y'] = bank_data['spin1y'][bank_id] + params['spin2x'] = bank_data['spin2x'][bank_id] + params['spin2y'] = bank_data['spin2y'][bank_id] + params['inclination'] = bank_data['inclination'][bank_id] + except KeyError: + pass + return params + + +def make_single_template_files(workflow, segs, ifo, data_read_name, + analyzed_name, params, out_dir, inj_file=None, + exclude=None, require=None, tags=None, + store_file=False, use_mean_time=False, + use_exact_inj_params=False): + """Function for creating jobs to run the 
pycbc_single_template code and + add these jobs to the workflow. + + Parameters + ----------- + workflow : workflow.Workflow instance + The pycbc.workflow.Workflow instance to add these jobs to. + segs : workflow.File instance + The pycbc.workflow.File instance that points to the XML file containing + the segment lists of data read in and data analyzed. + ifo: str + The name of the interferometer + data_read_name : str + The name of the segmentlist containing the data read in by each + inspiral job in the segs file. + analyzed_name : str + The name of the segmentlist containing the data analyzed by each + inspiral job in the segs file. + params : dictionary + A dictionary containing the parameters of the template to be used. + params[ifo+'end_time'] is required for all ifos in workflow.ifos. + If use_exact_inj_params is False then also need to supply values for + [mass1, mass2, spin1z, spin2x]. For precessing templates one also + needs to supply [spin1y, spin1x, spin2x, spin2y, inclination] + additionally for precession one must supply u_vals or + u_vals_+ifo for all ifos. u_vals is the ratio between h_+ and h_x to + use when constructing h(t). h(t) = (h_+ * u_vals) + h_x. + out_dir : str + Directory in which to store the output files. + inj_file : workflow.File (optional, default=None) + If given send this injection file to the job so that injections are + made into the data. + exclude : list (optional, default=None) + If given, then when considering which subsections in the ini file to + parse for options to add to single_template_plot, only use subsections + that *do not* match strings in this list. + require : list (optional, default=None) + If given, then when considering which subsections in the ini file to + parse for options to add to single_template_plot, only use subsections + matching strings in this list. + tags : list (optional, default=None) + The tags to use for this job. 
+ store_file : boolean (optional, default=False) + Keep the output files of this job. + use_mean_time : boolean (optional, default=False) + Use the mean time as the center time for all ifos + use_exact_inj_params : boolean (optional, default=False) + If True do not use masses and spins listed in the params dictionary + but instead use the injection closest to the filter time as a template. + + Returns + -------- + output_files : workflow.FileList + The list of workflow.Files created in this function. + """ + tags = [] if tags is None else tags + makedir(out_dir) + name = 'single_template' + secs = requirestr(workflow.cp.get_subsections(name), require) + secs = excludestr(secs, exclude) + secs = excludestr(secs, workflow.ifo_combinations) + # Reanalyze the time around the trigger in each detector + curr_exe = SingleTemplateExecutable(workflow.cp, 'single_template', + ifos=[ifo], out_dir=out_dir, + tags=tags) + start = int(params[ifo + '_end_time']) + end = start + 1 + cseg = segments.segment([start, end]) + node = curr_exe.create_node(valid_seg=cseg) + + if use_exact_inj_params: + node.add_opt('--use-params-of-closest-injection') + else: + node.add_opt('--mass1', "%.6f" % params['mass1']) + node.add_opt('--mass2', "%.6f" % params['mass2']) + node.add_opt('--spin1z',"%.6f" % params['spin1z']) + node.add_opt('--spin2z',"%.6f" % params['spin2z']) + node.add_opt('--template-start-frequency', + "%.6f" % params['f_lower']) + # Is this precessing? 
+ if 'u_vals' in params or 'u_vals_%s' % ifo in params: + node.add_opt('--spin1x',"%.6f" % params['spin1x']) + node.add_opt('--spin1y',"%.6f" % params['spin1y']) + node.add_opt('--spin2x',"%.6f" % params['spin2x']) + node.add_opt('--spin2y',"%.6f" % params['spin2y']) + node.add_opt('--inclination',"%.6f" % params['inclination']) + try: + node.add_opt('--u-val',"%.6f" % params['u_vals']) + except: + node.add_opt('--u-val', + "%.6f" % params['u_vals_%s' % ifo]) + + if params[ifo + '_end_time'] > 0 and not use_mean_time: + trig_time = params[ifo + '_end_time'] + else: + trig_time = params['mean_time'] + + node.add_opt('--trigger-time', f"{trig_time:.6f}") + node.add_input_opt('--inspiral-segments', segs) + if inj_file is not None: + node.add_input_opt('--injection-file', inj_file) + node.add_opt('--data-read-name', data_read_name) + node.add_opt('--data-analyzed-name', analyzed_name) + node.new_output_file_opt(workflow.analysis_time, '.hdf', + '--output-file', store_file=store_file) + workflow += node + return node.output_files + + def make_single_template_plots(workflow, segs, data_read_name, analyzed_name, - params, out_dir, inj_file=None, exclude=None, - require=None, tags=None, params_str=None, - use_exact_inj_params=False): + params, out_dir, inj_file=None, exclude=None, + data_segments=None, + require=None, tags=None, params_str=None, + use_exact_inj_params=False): """Function for creating jobs to run the pycbc_single_template code and to run the associated plotting code pycbc_single_template_plots and add these jobs to the workflow. @@ -387,6 +567,10 @@ def make_single_template_plots(workflow, segs, data_read_name, analyzed_name, If given, then when considering which subsections in the ini file to parse for options to add to single_template_plot, only use subsections matching strings in this list. + data_segments : dictionary of segment lists + Dictionary of segment lists keyed on the IFO. Used to decide if an + IFO is plotted if there is valid data. 
If not given, will plot if + the IFO produced a trigger which contributed to the event tags : list (optional, default=None) Add this list of tags to all jobs. params_str : str (optional, default=None) @@ -398,8 +582,12 @@ def make_single_template_plots(workflow, segs, data_read_name, analyzed_name, Returns -------- - output_files : workflow.FileList - The list of workflow.Files created in this function. + hdf_files : workflow.FileList + The list of workflow.Files created by single_template jobs + in this function. + plot_files : workflow.FileList + The list of workflow.Files created by single_template_plot jobs + in this function. """ tags = [] if tags is None else tags makedir(out_dir) @@ -407,58 +595,37 @@ def make_single_template_plots(workflow, segs, data_read_name, analyzed_name, secs = requirestr(workflow.cp.get_subsections(name), require) secs = excludestr(secs, exclude) secs = excludestr(secs, workflow.ifo_combinations) - files = FileList([]) + hdf_files = FileList([]) + plot_files = FileList([]) + valid = {} + for ifo in workflow.ifos: + valid[ifo] = params['mean_time'] in data_segments[ifo] if data_segments \ + else params['%s_end_time' % ifo] > 0 for tag in secs: for ifo in workflow.ifos: - if params['%s_end_time' % ifo] == -1.0: + if not valid[ifo]: + # If the IFO is not being used, continue continue - # Reanalyze the time around the trigger in each detector - curr_exe = SingleTemplateExecutable(workflow.cp, 'single_template', - ifos=[ifo], out_dir=out_dir, - tags=[tag] + tags) - start = int(params[ifo + '_end_time']) - end = start + 1 - cseg = segments.segment([start, end]) - node = curr_exe.create_node(valid_seg=cseg) - - if use_exact_inj_params: - node.add_opt('--use-params-of-closest-injection') - else: - node.add_opt('--mass1', "%.6f" % params['mass1']) - node.add_opt('--mass2', "%.6f" % params['mass2']) - node.add_opt('--spin1z',"%.6f" % params['spin1z']) - node.add_opt('--spin2z',"%.6f" % params['spin2z']) - 
node.add_opt('--template-start-frequency', - "%.6f" % params['f_lower']) - # Is this precessing? - if 'u_vals' in params or 'u_vals_%s' % ifo in params: - node.add_opt('--spin1x',"%.6f" % params['spin1x']) - node.add_opt('--spin1y',"%.6f" % params['spin1y']) - node.add_opt('--spin2x',"%.6f" % params['spin2x']) - node.add_opt('--spin2y',"%.6f" % params['spin2y']) - node.add_opt('--inclination',"%.6f" % params['inclination']) - try: - node.add_opt('--u-val',"%.6f" % params['u_vals']) - except: - node.add_opt('--u-val', - "%.6f" % params['u_vals_%s' % ifo]) - - # str(numpy.float64) restricts to 2d.p. BE CAREFUL WITH THIS!!! - str_trig_time = '%.6f' %(params[ifo + '_end_time']) - node.add_opt('--trigger-time', str_trig_time) - node.add_input_opt('--inspiral-segments', segs) - if inj_file is not None: - node.add_input_opt('--injection-file', inj_file) - node.add_opt('--data-read-name', data_read_name) - node.add_opt('--data-analyzed-name', analyzed_name) - node.new_output_file_opt(workflow.analysis_time, '.hdf', - '--output-file', store_file=False) - data = node.output_files[0] - workflow += node + data = make_single_template_files( + workflow, + segs, + ifo, + data_read_name, + analyzed_name, + params, + out_dir, + inj_file=inj_file, + exclude=exclude, + require=require, + tags=tags + [tag], + store_file=False, + use_exact_inj_params=use_exact_inj_params + ) + hdf_files += data # Make the plot for this trigger and detector node = PlotExecutable(workflow.cp, name, ifos=[ifo], out_dir=out_dir, tags=[tag] + tags).create_node() - node.add_input_opt('--single-template-file', data) + node.add_input_opt('--single-template-file', data[0]) node.new_output_file_opt(workflow.analysis_time, '.png', '--output-file') title="'%s SNR and chi^2 timeseries" %(ifo) @@ -478,8 +645,8 @@ def make_single_template_plots(workflow, segs, data_read_name, analyzed_name, params['spin2z']) node.add_opt('--plot-caption', caption) workflow += node - files += node.output_files - return files + 
plot_files += node.output_files + return hdf_files, plot_files def make_plot_waveform_plot(workflow, params, out_dir, ifos, exclude=None, require=None, tags=None): @@ -529,7 +696,7 @@ def make_inj_info(workflow, injection_file, injection_index, num, out_dir, files += node.output_files return files -def make_coinc_info(workflow, singles, bank, coinc, out_dir, +def make_coinc_info(workflow, singles, bank, coinc_file, out_dir, n_loudest=None, trig_id=None, file_substring=None, sort_order=None, sort_var=None, title=None, tags=None): tags = [] if tags is None else tags @@ -539,7 +706,7 @@ def make_coinc_info(workflow, singles, bank, coinc, out_dir, node = PlotExecutable(workflow.cp, name, ifos=workflow.ifos, out_dir=out_dir, tags=tags).create_node() node.add_input_list_opt('--single-trigger-files', singles) - node.add_input_opt('--statmap-file', coinc) + node.add_input_opt('--statmap-file', coinc_file) node.add_input_opt('--bank-file', bank) if sort_order: node.add_opt('--sort-order', sort_order) @@ -852,3 +1019,222 @@ def make_skipped_html(workflow, skipped_data, out_dir, tags): workflow += node files = node.output_files return files + + +def make_upload_files(workflow, psd_files, snr_timeseries, xml_all, + event_id, approximant, out_dir, channel_name, + tags=None): + """ + Make files including xml, skymap fits and plots for uploading to gracedb + for a given event + + Parameters + ---------- + psd_files: FileList([]) + PSD Files from MERGE_PSDs for the search as appropriate for the + event + snr_timeseries: FileList([]) + SNR timeseries files, one from each IFO, to add to the XML and plot + output from pysbs_single_template + xml_all: pycbc.workflow.core.File instance + XML file containing all events from the search + event_id: string + an integer to describe the event's position in the xml_all file + approximant: byte string + The approximant used for the template of the event, to be passed + to bayestar for sky location + out_dir: + The directory where all the 
output files should go + channel_name: string + Channel name to be added to the XML file to be uploaded + tags: {None, optional} + Tags to add to the minifollowups executables + + Returns + ------- + all_output_files: FileList + List of all output files from this process + """ + indiv_xml_exe = Executable( + workflow.cp, + 'generate_xml', + ifos=workflow.ifos, out_dir=out_dir, + tags=tags + ) + + xml_node = indiv_xml_exe.create_node() + xml_node.add_input_opt('--input-file', xml_all) + xml_node.add_opt('--event-id', event_id) + xml_node.add_input_list_opt('--psd-files', psd_files) + xml_node.add_input_list_opt('--snr-timeseries', snr_timeseries) + xml_node.add_opt('--channel-name', channel_name) + xml_node.new_output_file_opt( + workflow.analysis_time, + '.png', + '--snr-timeseries-plot', + tags=['snr'] + ) + xml_node.new_output_file_opt( + workflow.analysis_time, + '.png', + '--psd-plot', + tags=['psd'] + ) + xml_out = xml_node.new_output_file_opt( + workflow.analysis_time, + '.xml', + '--output-file' + ) + + workflow += xml_node + + bayestar_exe = Executable( + workflow.cp, + 'bayestar', + ifos=workflow.ifos, + out_dir=out_dir, + tags=tags + ) + + bayestar_node = bayestar_exe.create_node() + bayestar_node.add_input_opt('--event-xml', xml_out) + fits_out = bayestar_node.new_output_file_opt( + workflow.analysis_time, + '.fits', + '--output-file', + ) + + if approximant == b'SPAtmplt': + # Bayestar doesn't use the SPAtmplt approximant + approximant = b'TaylorF2' + if approximant is not None: + bayestar_node.add_opt('--waveform', approximant.decode()) + + workflow += bayestar_node + + skymap_plot_exe = PlotExecutable( + workflow.cp, + 'skymap_plot', + ifos=workflow.ifos, + out_dir=out_dir, + tags=tags + ) + + skymap_plot_node = skymap_plot_exe.create_node() + skymap_plot_node.add_input_opt('', fits_out) + skymap_plot_node.new_output_file_opt( + workflow.analysis_time, + '.png', + '-o', + ) + workflow += skymap_plot_node + + all_output_files = xml_node.output_files + 
bayestar_node.output_files + \ + skymap_plot_node.output_files + return all_output_files + + +def setup_upload_prep_minifollowups(workflow, coinc_file, xml_all_file, + single_triggers, psd_files, + tmpltbank_file, insp_segs, insp_data_name, + insp_anal_name, dax_output, out_dir, + tags=None): + """ Create plots that follow up the Nth loudest coincident event + from a statmap produced HDF file. + + Parameters + ---------- + workflow: pycbc.workflow.Workflow + The core workflow instance we are populating + coinc_file: pycbc.workflow.File + The HDF statmap file containing the coincident triggers to follow up + single_triggers: list of pycbc.workflow.File + A list containing the file objects associated with the merged + single detector trigger files for each ifo. + psd_files: list of pycbc.workflow.File + A list containing the file objects associated with the merged + psd files for each ifo. + xml_all_file : workflow file object + XML File containing all foreground events + tmpltbank_file: pycbc.workflow.File + The file object pointing to the HDF format template bank + insp_segs: SegFile + The segment file containing the data read and analyzed by each inspiral + job. + insp_data_name: str + The name of the segmentlist storing data read. + insp_anal_name: str + The name of the segmentlist storing data analyzed. + dax_output : directory + Location of the dax outputs + out_dir: path + The directory to store minifollowups result plots and files + tags: {None, optional} + Tags to add to the minifollowups executables + + Returns + ------- + layout: list + A list of tuples which specify the displayed file layout for the + minifollowups plots.
+ """ + logging.info('Entering minifollowups module') + + if not workflow.cp.has_section('workflow-minifollowups'): + logging.info('There is no [workflow-minifollowups] section in configuration file') + logging.info('Leaving minifollowups') + return + + tags = [] if tags is None else tags + makedir(dax_output) + makedir(out_dir) + + # turn the config file into a File class + config_path = os.path.abspath(dax_output + '/' + '_'.join(tags) + \ + 'upload_prep_minifollowup.ini') + workflow.cp.write(open(config_path, 'w')) + + config_file = resolve_url_to_file(config_path) + + exe = Executable(workflow.cp, 'upload_prep_minifollowup', + ifos=workflow.ifos, out_dir=dax_output, tags=tags) + + node = exe.create_node() + node.add_input_opt('--config-files', config_file) + node.add_input_opt('--xml-all-file', xml_all_file) + node.add_input_opt('--bank-file', tmpltbank_file) + node.add_input_opt('--statmap-file', coinc_file) + node.add_multiifo_input_list_opt('--single-detector-triggers', + single_triggers) + node.add_multiifo_input_list_opt('--psd-files', psd_files) + node.add_input_opt('--inspiral-segments', insp_segs) + node.add_opt('--inspiral-data-read-name', insp_data_name) + node.add_opt('--inspiral-data-analyzed-name', insp_anal_name) + if tags: + node.add_list_opt('--tags', tags) + node.new_output_file_opt(workflow.analysis_time, '.dax', '--dax-file') + node.new_output_file_opt(workflow.analysis_time, '.dax.map', '--output-map') + + name = node.output_files[0].name + map_file = node.output_files[1] + + node.add_opt('--workflow-name', name) + node.add_opt('--output-dir', out_dir) + node.add_opt('--dax-file-directory', '.') + + workflow += node + + # execute this in a sub-workflow + fil = node.output_files[0] + + # determine if a staging site has been specified + job = SubWorkflow(fil.name, is_planned=False) + input_files = [xml_all_file, tmpltbank_file, coinc_file, insp_segs] + \ + single_triggers + psd_files + job.add_inputs(*input_files) + 
job.set_subworkflow_properties(map_file, + staging_site=workflow.staging_site, + cache_file=workflow.cache_file) + job.add_into_workflow(workflow) + logging.info('Leaving minifollowups module') diff --git a/pycbc/workflow/psd.py b/pycbc/workflow/psd.py index c934ed8620f..2c733a32405 100644 --- a/pycbc/workflow/psd.py +++ b/pycbc/workflow/psd.py @@ -73,10 +73,7 @@ def setup_psd_calculate(workflow, frame_files, ifo, segments, segment_name, out_dir, tags=tags + ['PART%s' % i])] - if num_parts > 1: - return merge_psds(workflow, psd_files, ifo, out_dir, tags=tags) - else: - return psd_files[0] + return merge_psds(workflow, psd_files, ifo, out_dir, tags=tags) def make_psd_file(workflow, frame_files, segment_file, segment_name, out_dir, tags=None):