From 132e9da64ef935810eb19ec565703c3689e3236e Mon Sep 17 00:00:00 2001
From: Ian Harry
Date: Tue, 9 Jan 2024 10:30:12 +0000
Subject: [PATCH] Prepare v2.3.3 release (#4596)

* Use gating windows in sngl_minifollowup and allow 'triggers within vetoes' option (#4549)
* Use gating windows in sngl_minifollowup - also allow 'triggers within vetoes' option
* Minor fixes
* add info if fewer triggers than requested
* Allow pycbc_sngls_minifollowup to use single ranking statistic
* Fix bug in rank_stat_single for quadsum/phasetd statistics
* mask may be boolean now
* fix broken pycbc_page_snglinfo due to bugfix
* page_snglinfo to use the ranking statistic used
* missed change
* Fix cases where ifar limit was not being applied (#4574)
* Fix cases where ifar limit was not being applied
* some more missed cases / long lines
* redoing a couple more long lines
* missed one more place
* unrelated bug where the IFOs were not in the right order for use with the significance_dict
* remove removal of downranked triggers (#4579)
* remove removal of downranked triggers
* add brief explanation into the caption
* bug in choosing the far calculation method (#4593)
* bug in which IFO combo is used for calculating the FAR in each combination during HR
* Safety catch
* Update release number
* add coordinates_space.py (#4289)
* add coordinates_space.py
* add LISA/SSB frame params
* add LISA_to_GEO and GEO_to_LISA
* add coordinates_space into FieldArray
* add doc and Astropy support
* update comments on sympy
* use fsolve from scipy instead
* fix cc issues
* fix cc issues
* minor fix
* update
* not use iteration
* decouple LISA orbit and more accurate Earth
* rename
* remove jplephem
* add the angular displacement of the Earth
* use radians
* make func readable in .ini
* reverse back to master
* correct psi range
* reverse to master
* fix unit issue in earth_position_SSB
* put LISA to the "right" position
* add LISA specific transform classes here
* change names
* update
* make a package for coordinates
* remove coordinates_space import
* move __all__ into __init__.py
* remove all coordinates_space
* change TIME_OFFSET to seconds
* fix SOBHB issue
* rename
* add SSB or LISA params into fid_params
* rename
* fix cc issues
* fix cc issue
* fix cc issue
* update
* update
* fix
* add default names
* overwrite params with same names
* remove pre-fixed names
* remove all pre-fixed names
* not pop
* fix inverse transform
* update tc
* not overwrite
* add SNR support for multi-model
* Update waveform.py
* t0 issue
* t0 issue
* Update space.py
* add obstime
* np.mod(psi_newframe, 2*np.pi)
* fix obstime
* add support for array inputs
* Update hierarchical.py
* just use Alex's implementation
* CustomTransformMultiOutputs is in another PR, so remove it
* add LDC and LAL convention correction
* use pycbc standard names
* more meaningful name
* use pycbc standard names
* Update relbin.py
* Update parameters.py
* remove unnecessary changes
* fix cc issue
* fix cc issue
* fix cc issue
* fix cc issue
* compactify
* compactify
* add __all__ back
* Update transforms.py
* Update transforms.py
* Update test_transforms.py
* Update transforms.py
* update doc
* fix time warning
* Update space.py
* Update test_transforms.py
* Create test_coordinates_space.py
* fix cc issues
* fix cc issues
* fix cc issue
* Update tox.ini
* Update tox.ini
* Update tox.ini
* Update tox.ini
* Update tox.ini
* Update tox.ini
* Update tox.ini
* Update test_coordinates_space.py
* add inline doc
* Update tox.ini
* add check of bbhx
* Update test_coordinates_space.py
* Update tox.ini
* Update test_coordinates_space.py
* add MultibandRelativeTimeDom into hierarchical.py
* Update __init__.py
* Update hierarchical.py
* Update hierarchical.py
* Update relbin.py
* Update hierarchical.py
* Update hierarchical.py
* Update relbin.py
* Update hierarchical.py
* Update hierarchical.py
* Update hierarchical.py
* Update hierarchical.py
* Update hierarchical.py
* Update relbin.py
* Update hierarchical.py
* Update hierarchical.py
* Update relbin.py
* Update __init__.py
* Update space.py
* Update space.py
* Update space.py
* fix psi issue
* Update test_coordinates_space.py
* Update test_coordinates_space.py
* update lalsimulation cvmfs path (#4580)
* update lalsimulation cvmfs path
* missed that the mount location needs to be changed as well
* more references to previous CVMFS location
* Revert "add coordinates_space.py (#4289)"

  This reverts commit e3418c70d8da796fb8b4932fb7f0b22d82621bf0.

* Removing LISA examples
* Remove inference examples
* Remove LISA deps
* Removing more LISA things

---------

Co-authored-by: Gareth S Cabourn Davies
Co-authored-by: Shichao Wu
---
 Dockerfile                                    |   4 +-
 bin/all_sky_search/pycbc_add_statmap          |  36 +++-
 bin/all_sky_search/pycbc_coinc_statmap        |  36 +++-
 bin/all_sky_search/pycbc_coinc_statmap_inj    |  21 ++-
 bin/all_sky_search/pycbc_sngls_statmap        |  30 ++-
 bin/all_sky_search/pycbc_sngls_statmap_inj    |  10 +-
 bin/minifollowups/pycbc_page_snglinfo         |   7 +-
 .../pycbc_plot_trigger_timeseries             |  19 +-
 bin/minifollowups/pycbc_sngl_minifollowup     | 172 +++++++++++++++---
 .../pycbc_make_offline_search_workflow        |   5 +-
 docker/etc/docker-install.sh                  |   6 +-
 .../inference_example_lisa_smbhb_inj.sh       |   6 -
 .../inference_example_lisa_smbhb_ldc.sh       |   5 -
 docs/building_bundled_executables.rst         |   4 +-
 docs/inference.rst                            |   2 -
 docs/inference/examples/lisa_smbhb_inj_pe.rst |  66 -------
 docs/inference/examples/lisa_smbhb_ldc_pe.rst |  70 -------
 docs/inference/models.rst                     |  12 --
 docs/install.rst                              |   4 +-
 docs/install_lalsuite.rst                     |   2 +-
 .../pycbc_make_offline_search_workflow.rst    |   4 +-
 pycbc/events/stat.py                          |  10 +-
 pycbc/io/hdf.py                               |   3 +-
 pycbc/workflow/minifollowups.py               |  12 +-
 setup.py                                      |   2 +-
 tools/cvmfs-default.local                     |   2 +-
 tox.ini                                       |   3 -
 27 files changed, 306 insertions(+), 247 deletions(-)
 delete mode 100644 docs/_include/inference_example_lisa_smbhb_inj.sh
 delete mode 100644 docs/_include/inference_example_lisa_smbhb_ldc.sh
 delete mode 100644 docs/inference/examples/lisa_smbhb_inj_pe.rst
 delete mode 100644 docs/inference/examples/lisa_smbhb_ldc_pe.rst

diff --git a/Dockerfile b/Dockerfile
index 7002c97072a..97997bc5ab9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -15,7 +15,7 @@ RUN dnf -y install https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-
 
 # set up environment
 RUN cd / && \
-    mkdir -p /cvmfs/config-osg.opensciencegrid.org /cvmfs/oasis.opensciencegrid.org /cvmfs/gwosc.osgstorage.org && echo "config-osg.opensciencegrid.org /cvmfs/config-osg.opensciencegrid.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "oasis.opensciencegrid.org /cvmfs/oasis.opensciencegrid.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "gwosc.osgstorage.org /cvmfs/gwosc.osgstorage.org cvmfs ro,noauto 0 0" >> /etc/fstab && mkdir -p /oasis /scratch /projects /usr/lib64/slurm /var/run/munge && \
+    mkdir -p /cvmfs/config-osg.opensciencegrid.org /cvmfs/software.igwn.org /cvmfs/gwosc.osgstorage.org && echo "config-osg.opensciencegrid.org /cvmfs/config-osg.opensciencegrid.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "software.igwn.org /cvmfs/software.igwn.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "gwosc.osgstorage.org /cvmfs/gwosc.osgstorage.org cvmfs ro,noauto 0 0" >> /etc/fstab && mkdir -p /oasis /scratch /projects /usr/lib64/slurm /var/run/munge && \
     groupadd -g 1000 pycbc && useradd -u 1000 -g 1000 -d /opt/pycbc -k /etc/skel -m -s /bin/bash pycbc
 
 # Install MPI software needed for pycbc_inference
@@ -37,7 +37,7 @@ ENV PATH "/usr/local/bin:/usr/bin:/bin:/lib64/openmpi/bin/bin"
 # Set the default LAL_DATA_PATH to point at CVMFS first, then the container.
 # Users wanting it to point elsewhere should start docker using:
 #   docker -e LAL_DATA_PATH="/my/new/path"
-ENV LAL_DATA_PATH "/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation:/opt/pycbc/pycbc-software/share/lal-data"
+ENV LAL_DATA_PATH "/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation:/opt/pycbc/pycbc-software/share/lal-data"
 
 # When the container is started with
 #   docker run -it pycbc/pycbc-el8:latest
diff --git a/bin/all_sky_search/pycbc_add_statmap b/bin/all_sky_search/pycbc_add_statmap
index 7cf5da20d97..5ea7a4cad17 100755
--- a/bin/all_sky_search/pycbc_add_statmap
+++ b/bin/all_sky_search/pycbc_add_statmap
@@ -345,6 +345,8 @@ else:
             f_in.attrs['background_time_exc'],
             **significance_dict[ifo_combo_key])
 
+del ifo_combo_key
+
 logging.info('Combining false alarm rates from all available backgrounds')
 
 # Convert dictionary of whether the ifo combination is available at trigger
@@ -361,8 +363,14 @@ fg_fars_out = np.sum(isincombo_mask * fg_fars, axis=0)
 fg_fars_exc_out = np.sum(isincombo_mask * fg_fars_exc, axis=0)
 
 # Apply any limits as appropriate
-fg_fars_out = significance.apply_far_limit(fg_fars_out, significance_dict, combo=fg_coinc_type)
-fg_fars_exc_out = significance.apply_far_limit(fg_fars_exc_out, significance_dict, combo=fg_coinc_type)
+fg_fars_out = significance.apply_far_limit(
+    fg_fars_out,
+    significance_dict,
+    combo=fg_coinc_type)
+fg_fars_exc_out = significance.apply_far_limit(
+    fg_fars_exc_out,
+    significance_dict,
+    combo=fg_coinc_type)
 
 fg_ifar = conv.sec_to_year(1. / fg_fars_out)
 fg_ifar_exc = conv.sec_to_year(1. / fg_fars_exc_out)
@@ -562,6 +570,7 @@ while True:
     final_combined_fg = final_combined_fg + \
         combined_fg_data.select(where_combined)
     combined_fg_data = combined_fg_data.remove(where_combined)
+    fg_coinc_type = np.delete(fg_coinc_type, where_combined)
     n_triggers -= 1
 
     logging.info('Removing background triggers at time {} within window '
@@ -604,7 +613,18 @@ while True:
             sep_fg_data[key].data['stat'],
             sep_bg_data[key].data['decimation_factor'],
             bg_t_y,
-            **significance_dict[ifo_combo_key])
+            **significance_dict[key])
+        fg_far = significance.apply_far_limit(
+            fg_far,
+            significance_dict,
+            combo=key,
+        )
+        bg_far = significance.apply_far_limit(
+            bg_far,
+            significance_dict,
+            combo=key,
+        )
+
         sep_bg_data[key].data['ifar'] = 1. / bg_far
         sep_fg_data[key].data['ifar'] = 1. / fg_far
         sep_fg_data[key].data['fap'] = 1 - \
@@ -617,7 +637,7 @@ while True:
             combined_fg_data.data['stat'],
             sep_bg_data[key].data['decimation_factor'],
             bg_time_ct[key],
-            **significance_dict[ifo_combo_key])
+            **significance_dict[key])
         # Set up variable for whether each coincidence is available in each coincidence time
         is_in_combo_time[key] = np.zeros(n_triggers)
         end_times = np.array(f['segments/%s/end' % key][:])
@@ -631,9 +651,15 @@ while True:
     isincombo_mask = np.array([list(is_in_combo_time[ct])
                                for ct in all_ifo_combos])
     fg_fars = np.array([list(far[ct]) for ct in all_ifo_combos])
+    fg_fars_out = np.sum(isincombo_mask * fg_fars, axis=0)
+    fg_fars_out = significance.apply_far_limit(
+        fg_fars_out,
+        significance_dict,
+        combo=fg_coinc_type,
+    )
     # Combine the FARs with the mask to obtain the new ifars
     combined_fg_data.data['ifar'] = conv.sec_to_year(
-        1. / np.sum(isincombo_mask * fg_fars, axis=0))
+        1. / fg_fars_out)
     fg_time -= args.cluster_window
     combined_fg_data.data['fap'] = 1 - \
         np.exp(-conv.sec_to_year(fg_time) / combined_fg_data.data['ifar'])
diff --git a/bin/all_sky_search/pycbc_coinc_statmap b/bin/all_sky_search/pycbc_coinc_statmap
index 8fcd7836fd9..bfe97b866ac 100755
--- a/bin/all_sky_search/pycbc_coinc_statmap
+++ b/bin/all_sky_search/pycbc_coinc_statmap
@@ -257,10 +257,22 @@ bg_far_exc, fg_far_exc = significance.get_far(
     background_time_exc,
     **significance_dict[ifo_combo])
 
-fg_far = significance.apply_far_limit(fg_far, significance_dict, combo=ifo_combo)
-bg_far = significance.apply_far_limit(bg_far, significance_dict, combo=ifo_combo)
-fg_far_exc = significance.apply_far_limit(fg_far_exc, significance_dict, combo=ifo_combo)
-bg_far_exc = significance.apply_far_limit(bg_far_exc, significance_dict, combo=ifo_combo)
+fg_far = significance.apply_far_limit(
+    fg_far,
+    significance_dict,
+    combo=ifo_combo)
+bg_far = significance.apply_far_limit(
+    bg_far,
+    significance_dict,
+    combo=ifo_combo)
+fg_far_exc = significance.apply_far_limit(
+    fg_far_exc,
+    significance_dict,
+    combo=ifo_combo)
+bg_far_exc = significance.apply_far_limit(
+    bg_far_exc,
+    significance_dict,
+    combo=ifo_combo)
 
 f['background/ifar'] = conv.sec_to_year(1. / bg_far)
 f['background_exc/ifar'] = conv.sec_to_year(1. / bg_far_exc)
@@ -421,6 +433,17 @@ while numpy.any(ifar_foreground >= background_time):
         return_counts=True,
         **significance_dict[ifo_combo])
 
+    fg_far = significance.apply_far_limit(
+        fg_far,
+        significance_dict,
+        combo=ifo_combo
+    )
+    bg_far = significance.apply_far_limit(
+        bg_far,
+        significance_dict,
+        combo=ifo_combo,
+    )
+
     # Update the ifar_foreground criteria depending on whether foreground
     # triggers are being removed via inclusive or exclusive background.
     if args.hierarchical_removal_against == 'inclusive':
@@ -435,6 +458,11 @@ while numpy.any(ifar_foreground >= background_time):
             exc_zero_trigs.decimation_factor,
             background_time_exc,
             **significance_dict[ifo_combo])
+        fg_far_exc = significance.apply_far_limit(
+            fg_far_exc,
+            significance_dict,
+            combo=ifo_combo
+        )
         ifar_foreground = 1. / fg_far_exc
 
     # ifar_foreground has been updated and the code can continue.
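Editor's note (not part of the upstream patch): every hunk above follows the same pattern — false-alarm rates from significance.get_far() are passed through significance.apply_far_limit() before being inverted into IFARs, which is the substance of the "ifar limit was not being applied" fix. A minimal self-contained sketch of that cap-then-invert logic; apply_far_limit_demo, the FAR values and the 1e-4/yr limit are illustrative stand-ins, not the PyCBC API::

    import numpy as np

    SEC_PER_YEAR = 31557600.0  # Julian year, as used by pycbc.conversions.sec_to_year

    def apply_far_limit_demo(far_hz, far_limit_per_year):
        # Floor the FAR at a configured limit so that IFAR = 1/FAR cannot
        # exceed what the accumulated background time can support.
        return np.maximum(far_hz, far_limit_per_year / SEC_PER_YEAR)

    fg_far = np.array([1e-3, 1e-12, 5e-6])       # made-up foreground FARs in Hz
    fg_far = apply_far_limit_demo(fg_far, 1e-4)  # assumed limit of 1e-4 per year
    fg_ifar = (1. / fg_far) / SEC_PER_YEAR       # IFAR in years, as in the scripts
    print(fg_ifar)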
diff --git a/bin/all_sky_search/pycbc_coinc_statmap_inj b/bin/all_sky_search/pycbc_coinc_statmap_inj
index 2dab3a71581..03e4eab26c6 100644
--- a/bin/all_sky_search/pycbc_coinc_statmap_inj
+++ b/bin/all_sky_search/pycbc_coinc_statmap_inj
@@ -37,12 +37,21 @@ if args.verbose:
     log_level = logging.INFO
 logging.basicConfig(format='%(asctime)s : %(message)s', level=log_level)
 
-ifo_key = ''.join(args.ifos)
-significance_dict = significance.digest_significance_options([ifo_key], args)
 window = args.cluster_window
 
 logging.info("Loading coinc zerolag triggers")
 zdata = pycbc.io.MultiifoStatmapData(files=args.zero_lag_coincs, ifos=args.ifos)
+
+if 'ifos' in zdata.attrs:
+    ifos = zdata.attrs['ifos'].split(' ')
+    logging.info('using ifos from file {}'.format(args.zero_lag_coincs[0]))
+else:
+    ifos = args.ifos
+    logging.info('using ifos from command line input')
+
+ifo_key = ''.join(ifos)
+significance_dict = significance.digest_significance_options([ifo_key], args)
+
 zdata = zdata.cluster(window)
 
 f = h5py.File(args.output_file, "w")
@@ -51,7 +60,7 @@ f.attrs['num_of_ifos'] = zdata.attrs['num_of_ifos']
 f.attrs['pivot'] = zdata.attrs['pivot']
 f.attrs['fixed'] = zdata.attrs['fixed']
 f.attrs['timeslide_interval'] = zdata.attrs['timeslide_interval']
-f.attrs['ifos'] = ' '.join(sorted(args.ifos))
+f.attrs['ifos'] = ' '.join(sorted(ifos))
 
 # Copy over the segment for coincs and singles
 for key in zdata.seg.keys():
@@ -90,7 +99,11 @@ if len(zdata) > 0:
         background_time,
         **significance_dict[ifo_key])
 
-    fg_far_exc = significance.apply_far_limit(fg_far_exc, significance_dict, combo=ifo_key)
+    fg_far_exc = significance.apply_far_limit(
+        fg_far_exc,
+        significance_dict,
+        combo=ifo_key,
+    )
 
     ifar_exc = 1. / fg_far_exc
     fap_exc = 1 - numpy.exp(- coinc_time / ifar_exc)
diff --git a/bin/all_sky_search/pycbc_sngls_statmap b/bin/all_sky_search/pycbc_sngls_statmap
index 9ed977c5ac9..e3c2f03ebdb 100755
--- a/bin/all_sky_search/pycbc_sngls_statmap
+++ b/bin/all_sky_search/pycbc_sngls_statmap
@@ -146,8 +146,16 @@ bg_far, fg_far = significance.get_far(
     fg_time,
     **significance_dict[ifo])
 
-fg_far = significance.apply_far_limit(fg_far, significance_dict, combo=ifo)
-bg_far = significance.apply_far_limit(bg_far, significance_dict, combo=ifo)
+fg_far = significance.apply_far_limit(
+    fg_far,
+    significance_dict,
+    combo=ifo,
+)
+bg_far = significance.apply_far_limit(
+    bg_far,
+    significance_dict,
+    combo=ifo,
+)
 
 bg_ifar = 1. / bg_far
 fg_ifar = 1. / fg_far
@@ -341,6 +349,18 @@ while numpy.any(ifar_louder > hier_ifar_thresh_s):
         fg_time,
         **significance_dict[ifo])
 
+    fg_far = significance.apply_far_limit(
+        fg_far,
+        significance_dict,
+        combo=ifo,
+    )
+
+    bg_far = significance.apply_far_limit(
+        bg_far,
+        significance_dict,
+        combo=ifo,
+    )
+
     bg_ifar = 1. / bg_far
     fg_ifar = 1. / fg_far
 
@@ -359,6 +379,12 @@ while numpy.any(ifar_louder > hier_ifar_thresh_s):
         fg_time_exc,
         **significance_dict[ifo])
 
+    fg_far_exc = significance.apply_far_limit(
+        fg_far_exc,
+        significance_dict,
+        combo=ifo,
+    )
+
     fg_ifar_exc = 1. / fg_far_exc
 
     ifar_louder = fg_ifar_exc
diff --git a/bin/all_sky_search/pycbc_sngls_statmap_inj b/bin/all_sky_search/pycbc_sngls_statmap_inj
index a75cfee613b..c7ba970d7c4 100644
--- a/bin/all_sky_search/pycbc_sngls_statmap_inj
+++ b/bin/all_sky_search/pycbc_sngls_statmap_inj
@@ -117,8 +117,14 @@ bg_far_exc, fg_far_exc = significance.get_far(
     fg_time_exc,
     **significance_dict[ifo])
 
-fg_far_exc = significance.apply_far_limit(fg_far_exc, significance_dict, combo=ifo)
-bg_far_exc = significance.apply_far_limit(bg_far_exc, significance_dict, combo=ifo)
+fg_far_exc = significance.apply_far_limit(
+    fg_far_exc,
+    significance_dict,
+    combo=ifo)
+bg_far_exc = significance.apply_far_limit(
+    bg_far_exc,
+    significance_dict,
+    combo=ifo)
 
 fg_ifar_exc = 1. / fg_far_exc
 bg_ifar_exc = 1. / bg_far_exc
diff --git a/bin/minifollowups/pycbc_page_snglinfo b/bin/minifollowups/pycbc_page_snglinfo
index e6bc8dc31a4..eb248daecd1 100644
--- a/bin/minifollowups/pycbc_page_snglinfo
+++ b/bin/minifollowups/pycbc_page_snglinfo
@@ -138,8 +138,11 @@ else:
 
 if args.ranking_statistic in ["quadsum", "single_ranking_only"]:
     stat_name = sngl_stat_name
+    stat_name_long = sngl_stat_name
 else:
-    stat_name = '_with_'.join(ranking_statistic, sngl_ranking)
+    # Name would be too long - just call it ranking statistic
+    stat_name = 'Ranking Statistic'
+    stat_name_long = ' with '.join([args.ranking_statistic, args.sngl_ranking])
 
 headers.append(stat_name)
 
@@ -201,7 +204,7 @@ html = pycbc.results.dq.redirect_javascript + \
 if args.n_loudest:
     title = 'Parameters of single-detector event ranked %s' \
         % (args.n_loudest + 1)
-    caption = 'Parameters of the single-detector event ranked number %s by %s. The figures below show the mini-followup data for this event.' % (args.n_loudest + 1, stat_name)
+    caption = 'Parameters of the single-detector event ranked number %s by %s. The figures below show the mini-followup data for this event.' % (args.n_loudest + 1, stat_name_long)
 else:
     title = 'Parameters of single-detector event'
     caption = 'Parameters of the single-detector event. The figures below show the mini-followup data for this event.'
diff --git a/bin/minifollowups/pycbc_plot_trigger_timeseries b/bin/minifollowups/pycbc_plot_trigger_timeseries
index 8f5f5f0a0da..ef9829dab01 100644
--- a/bin/minifollowups/pycbc_plot_trigger_timeseries
+++ b/bin/minifollowups/pycbc_plot_trigger_timeseries
@@ -92,21 +92,11 @@ for ifo in args.single_trigger_files.keys():
     logging.info("Getting %s", args.plot_type)
     rank = ranking.get_sngls_ranking_from_trigs(trigs, args.plot_type)
 
-    if all(rank == 1):
-        # This is the default value to say "downranked beyond consideration"
-        # so skip these events
-        pylab.scatter(-2 * args.window, 0,
-                      color=pycbc.results.ifo_color(ifo),
-                      marker='x',
-                      label=ifo)
-        continue
-
     pylab.scatter(trigs['end_time'] - t, rank,
                   color=pycbc.results.ifo_color(ifo),
                   marker='x',
                   label=ifo)
 
-    # Minimum rank which hasn't been set to the default of 1
-    min_rank = min(min_rank, rank[rank > 1].min())
+    min_rank = min(min_rank, rank.min())
 
     if args.special_trigger_ids:
         special_idx = args.special_trigger_ids[ifo]
@@ -138,7 +128,10 @@ logging.info("Saving figure")
 pycbc.results.save_fig_with_metadata(fig, args.output_file,
     cmd = ' '.join(sys.argv),
    title = 'Single Detector Trigger Timeseries (%s)' % args.plot_type,
-    caption = 'Time series showing the single detector triggers'
-              ' centered around the time of the trigger of interest.',
+    caption = 'Time series showing the single-detector triggers '
+              'centered around the time of the trigger of interest. '
+              'Triggers with ranking 1 have been downweighted beyond '
+              'consideration, but may still form insignificant '
+              'events.',
 )
 logging.info("Done!")
diff --git a/bin/minifollowups/pycbc_sngl_minifollowup b/bin/minifollowups/pycbc_sngl_minifollowup
index bd3ae4bed1a..01fbc743e83 100644
--- a/bin/minifollowups/pycbc_sngl_minifollowup
+++ b/bin/minifollowups/pycbc_sngl_minifollowup
@@ -14,29 +14,24 @@
 # You should have received a copy of the GNU General Public License along
 # with this program; if not, write to the Free Software Foundation, Inc.,
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-""" Followup foreground events
+"""
+Followup single-detector triggers which do not contribute to foreground events
 """
 import os, argparse, logging
 import numpy
 from ligo.lw import table
 from ligo.lw import utils as ligolw_utils
 from pycbc.results import layout
+from pycbc.types.optparse import MultiDetOptionAction
 from pycbc.events import select_segments_by_definer
 import pycbc.workflow.minifollowups as mini
 import pycbc.version
 import pycbc.workflow as wf
 import pycbc.events
 from pycbc.workflow.core import resolve_url_to_file
-from pycbc.events import stat
+from pycbc.events import stat, veto, coinc
 from pycbc.io import hdf
 
-def add_wiki_row(outfile, cols):
-    """
-    Adds a wiki-formatted row to an output file from a list or a numpy array.
-    """
-    with open(outfile, 'a') as f:
-        f.write('||%s||\n' % '||'.join(map(str,cols)))
-
 parser = argparse.ArgumentParser(description=__doc__[1:])
 parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
 parser.add_argument('--bank-file',
@@ -44,11 +39,23 @@ parser.add_argument('--bank-file',
 parser.add_argument('--single-detector-file',
     help="HDF format merged single detector trigger files")
 parser.add_argument('--instrument', help="Name of interferometer e.g. H1")
+parser.add_argument('--foreground-censor-file',
+    help="The censor file to be used if vetoing triggers "
+         "in the foreground of the search (optional).")
+parser.add_argument('--foreground-segment-name',
+    help="If using foreground censor file must also provide "
+         "the name of the segment to use as a veto.")
 parser.add_argument('--veto-file',
-    help="The veto file to be used if vetoing triggers (optional).")
+    help="The veto file to be used if vetoing triggers "
+         "(optional).")
 parser.add_argument('--veto-segment-name',
-    help="If using veto file must also provide the name of the segment to use "
-         "as a veto.")
+    help="If using veto file must also provide the name of "
+         "the segment to use as a veto.")
+parser.add_argument("--gating-veto-windows", nargs='+',
+    action=MultiDetOptionAction,
+    help="Seconds to be vetoed before and after the central time "
+         "of each gate. Given as detector-values pairs, e.g. "
+         "H1:-1,2.5 L1:-1,2.5 V1:0,0")
 parser.add_argument('--inspiral-segments',
     help="xml segment file containing the inspiral analysis "
         "times")
@@ -60,17 +67,22 @@ parser.add_argument('--inspiral-data-analyzed-name',
          "analyzed by each analysis job.")
 parser.add_argument('--min-snr', type=float, default=6.5,
     help="Minimum SNR to consider for loudest triggers")
-parser.add_argument('--non-coinc-time-only', default=False,
-    action='store_true',
+parser.add_argument('--non-coinc-time-only', action='store_true',
     help="If given remove (veto) single-detector triggers "
         "that occur during a time when at least one other "
         "instrument is taking science data.")
+parser.add_argument('--vetoed-time-only', action='store_true',
+    help="If given, only report on single-detector triggers "
+         "that occur during vetoed times.")
 parser.add_argument('--minimum-duration', default=None, type=float,
     help="If given only consider single-detector triggers "
         "with template duration larger than this.")
 parser.add_argument('--maximum-duration', default=None, type=float,
     help="If given only consider single-detector triggers "
         "with template duration smaller than this.")
+parser.add_argument('--cluster-window', type=float, default=10,
+    help="Window (seconds) over which to cluster triggers "
+         "when finding the loudest-ranked. Default=10")
 wf.add_workflow_command_line_group(parser)
 wf.add_workflow_settings_cli(parser, include_subdax_opts=True)
 stat.insert_statistic_option_group(parser,
@@ -95,6 +107,12 @@ sngl_file = resolve_url_to_file(
     attrs={'ifos': args.instrument}
 )
 
+# Flatten the statistic_files option:
+statfiles = []
+for f in sum(args.statistic_files, []):
+    statfiles.append(resolve_url_to_file(os.path.abspath(f)))
+statfiles = wf.FileList(statfiles) if statfiles is not [] else None
+
 if args.veto_file is not None:
     veto_file = resolve_url_to_file(
         os.path.abspath(args.veto_file),
     )
 else:
     veto_file = None
+
 insp_segs = resolve_url_to_file(os.path.abspath(args.inspiral_segments))
 insp_data_seglists = select_segments_by_definer\
     (args.inspiral_segments, segment_name=args.inspiral_data_read_name,
@@ -112,20 +131,89 @@ num_events = int(workflow.cp.get_opt_tags('workflow-sngl_minifollowups',
                                           'num-sngl-events', ''))
 
 # This helps speed up the processing to ignore a large fraction of triggers
-snr_mask = None
+mask = None
+f = hdf.HFile(args.single_detector_file, 'r')
+n_triggers = f['{}/snr'.format(args.instrument)].size
+logging.info("%i triggers in file", n_triggers)
 if args.min_snr:
     logging.info('Calculating Prefilter')
-    f = hdf.HFile(args.single_detector_file, 'r')
     idx, _ = f.select(lambda snr: snr > args.min_snr,
                       '{}/snr'.format(args.instrument),
                       return_indices=True)
-    snr_mask = numpy.zeros(len(f['{}/snr'.format(args.instrument)]),
-                           dtype=bool)
-    snr_mask[idx] = True
+    mask = numpy.zeros(n_triggers, dtype=bool)
+    mask[idx] = True
+    if len(idx) < num_events:
+        logging.info("Fewer triggers exist after the --min-snr cut (%d) "
+                     "than requested for the minifollowup (%d)",
+                     len(idx), num_events)
 
-trigs = hdf.SingleDetTriggers(args.single_detector_file, args.bank_file,
-                              args.veto_file, args.veto_segment_name,
-                              None, args.instrument, premask=snr_mask)
+trigs = hdf.SingleDetTriggers(
+    args.single_detector_file,
+    args.bank_file,
+    args.foreground_censor_file,
+    args.foreground_segment_name,
+    None,
+    args.instrument,
+    premask=mask
+)
+
+# Include gating vetoes
+if args.gating_veto_windows:
+    logging.info("Getting gating vetoes")
+    gating_veto = args.gating_veto_windows[args.instrument].split(',')
+    gveto_before = float(gating_veto[0])
+    gveto_after = float(gating_veto[1])
+    if gveto_before > 0 or gveto_after < 0:
+        raise ValueError("Gating veto window values must be negative before "
+                         "gates and positive after gates.")
+    if not (gveto_before == 0 and gveto_after == 0):
+        gate_group = f[args.instrument + '/gating/']
+        autogate_times = numpy.unique(gate_group['auto/time'][:])
+        if 'file' in gate_group:
+            detgate_times = gate_group['file/time'][:]
+        else:
+            detgate_times = []
+        gate_times = numpy.concatenate((autogate_times, detgate_times))
+        gveto_idx = veto.indices_within_times(
+            trigs.end_time,
+            gate_times + gveto_before,
+            gate_times + gveto_after
+        )
+        logging.info('%i triggers in gating vetoes', gveto_idx.size)
+else:
+    gveto_idx = numpy.array([], dtype=numpy.uint64)
+
+if args.veto_file:
+    logging.info('Getting file vetoes')
+    # veto_mask is an array of indices into the trigger arrays
+    # giving the surviving triggers
+    veto_file_idx, _ = events.veto.indices_within_segments(
+        trigs.end_time,
+        [args.veto_file],
+        ifo=args.instrument,
+        segment_name=args.veto_segment_name
+    )
+
+    logging.info('%i triggers in file-vetoed segments',
+                 veto_file_idx.size)
+else:
+    veto_file_idx = numpy.array([], dtype=numpy.uint64)
+
+# Work out indices we are going to keep / remove
+vetoed_idx = numpy.unique(numpy.concatenate((veto_file_idx, gveto_idx)))
+# Needs to be in ascending order
+vetoed_idx = numpy.sort(vetoed_idx).astype(numpy.uint64)
+
+if args.vetoed_time_only and vetoed_idx.size > 0:
+    logging.info("Applying mask to keep only triggers within vetoed time")
+    trigs.apply_mask(vetoed_idx)
+elif vetoed_idx.size > 0:
+    logging.info("Applying mask to keep only triggers outwith vetoed time")
+    veto_mask = numpy.ones(trigs.end_time.size, dtype=bool)
+    veto_mask[vetoed_idx] = False
+    trigs.apply_mask(veto_mask)
+elif args.vetoed_time_only and vetoed_idx.size == 0:
+    logging.warning("No triggers exist inside vetoed times")
 
 if args.non_coinc_time_only:
     from pycbc.io.ligolw import LIGOLWContentHandler as h
@@ -158,29 +246,56 @@ if args.maximum_duration is not None:
     trigs.apply_mask(lgc_mask)
     logging.info('remaining triggers: %s', trigs.mask.sum())
 
-logging.info('finding loudest clustered events')
+logging.info('Finding loudest clustered events')
 rank_method = stat.get_statistic_from_opts(args, [args.instrument])
 
-trigs.mask_to_n_loudest_clustered_events(rank_method, n_loudest=num_events)
-if len(trigs.stat) < num_events:
-    num_events = len(trigs.stat)
+extra_kwargs = {}
+for inputstr in args.statistic_keywords:
+    try:
+        key, value = inputstr.split(':')
+        extra_kwargs[key] = value
+    except ValueError:
+        err_txt = "--statistic-keywords must take input in the " \
+                  "form KWARG1:VALUE1 KWARG2:VALUE2 KWARG3:VALUE3 ... " \
+                  "Received {}".format(args.statistic_keywords)
+        raise ValueError(err_txt)
+
+logging.info("Calculating statistic for %d triggers", len(trigs.snr))
+sds = rank_method.single(trigs)
+stat = rank_method.rank_stat_single((args.instrument, sds), **extra_kwargs)
+logging.info("Clustering events over %.3fs window", args.cluster_window)
+cid = coinc.cluster_over_time(stat, trigs.end_time,
+                              args.cluster_window)
+trigs.apply_mask(cid)
+stat = stat[cid]
+if len(trigs.snr) < num_events:
+    num_events = len(trigs.snr)
+
+logging.info("Finding the loudest triggers")
+loudest_idx = sorted(numpy.argsort(stat)[::-1][:num_events])
+trigs.apply_mask(loudest_idx)
+stat = stat[loudest_idx]
 
 times = trigs.end_time
 tids = trigs.template_id
 
 # loop over number of loudest events to be followed up
-order = trigs.stat.argsort()[::-1]
+order = stat.argsort()[::-1]
 for rank, num_event in enumerate(order):
     logging.info('Processing event: %s', num_event)
 
     files = wf.FileList([])
     time = times[num_event]
     ifo_time = '%s:%s' %(args.instrument, str(time))
-    tid = trigs.mask[num_event]
+    if isinstance(trigs.mask, numpy.ndarray) and trigs.mask.dtype == bool:
+        tid = numpy.flatnonzero(trigs.mask)[num_event]
+    else:
+        tid = trigs.mask[num_event]
     ifo_tid = '%s:%s' %(args.instrument, str(tid))
 
     layouts += (mini.make_sngl_ifo(workflow, sngl_file, tmpltbank_file,
                                    tid, args.output_dir, args.instrument,
+                                   statfiles=statfiles,
                                    tags=args.tags + [str(rank)]),)
     files += mini.make_trigger_timeseries(workflow, [sngl_file],
                                           ifo_time, args.output_dir,
                                           special_tids=ifo_tid,
@@ -217,7 +332,6 @@ for rank, num_event in enumerate(order):
                                       args.output_dir, [args.instrument],
                                       tags=args.tags + [str(rank)])
-
     files += mini.make_singles_timefreq(workflow, sngl_file, tmpltbank_file,
                                         time, args.output_dir,
                                         data_segments=insp_data_seglists,
diff --git a/bin/workflows/pycbc_make_offline_search_workflow b/bin/workflows/pycbc_make_offline_search_workflow
index 9b3a7da8678..e0995d33cc1 100755
--- a/bin/workflows/pycbc_make_offline_search_workflow
+++ b/bin/workflows/pycbc_make_offline_search_workflow
@@ -188,7 +188,7 @@ insps = wf.merge_single_detector_hdf_files(workflow, hdfbank,
                                            tags=['full_data'])
 
 # setup sngl trigger distribution fitting jobs
-# 'statfiles' is list of files used in calculating coinc statistic
+# 'statfiles' is list of files used in calculating statistic
 # 'dqfiles' is the subset of files containing data quality information
 statfiles = []
 dqfiles = []
@@ -458,7 +458,8 @@ for insp_file in full_insps:
     wf.setup_single_det_minifollowups\
         (workflow, insp_file, hdfbank, insp_files_seg_file,
         data_analysed_name, trig_generated_name, 'daxes', currdir,
-         veto_file=censored_veto, veto_segment_name='closed_box',
+         statfiles=wf.FileList(statfiles),
+         fg_file=censored_veto, fg_name='closed_box',
         tags=insp_file.tags + [subsec])
 
 ##################### COINC FULL_DATA plots ###################################
diff --git a/docker/etc/docker-install.sh b/docker/etc/docker-install.sh
index 68d31814e96..72246631395 100755
--- a/docker/etc/docker-install.sh
+++ b/docker/etc/docker-install.sh
@@ -11,11 +11,11 @@ mkdir -p /opt/pycbc/src
 cp -a /scratch /opt/pycbc/src/pycbc
 chmod a+rw /dev/fuse
 mount /cvmfs/config-osg.opensciencegrid.org
-mount /cvmfs/oasis.opensciencegrid.org
+mount /cvmfs/software.igwn.org
 mkdir -p /opt/pycbc/pycbc-software/share/lal-data
-rsync --exclude='SEOBNRv1ROM*' --exclude='SEOBNRv2ROM_DS_HI_v1.0.hdf5' --exclude='NRSur4d2s_FDROM.hdf5' -ravz /cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation/ /opt/pycbc/pycbc-software/share/lal-data/
+rsync --exclude='SEOBNRv1ROM*' --exclude='SEOBNRv2ROM_DS_HI_v1.0.hdf5' --exclude='NRSur4d2s_FDROM.hdf5' -ravz /cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation/ /opt/pycbc/pycbc-software/share/lal-data/
 umount /cvmfs/config-osg.opensciencegrid.org
-umount /cvmfs/oasis.opensciencegrid.org
+umount /cvmfs/software.igwn.org
 chown -R 1000:1000 /opt/pycbc/src /opt/pycbc/pycbc-software
 chmod -R u=rwX,g=rX,o=rX /opt/pycbc
 exit 0
diff --git a/docs/_include/inference_example_lisa_smbhb_inj.sh b/docs/_include/inference_example_lisa_smbhb_inj.sh
deleted file mode 100644
index d5ae715389b..00000000000
--- a/docs/_include/inference_example_lisa_smbhb_inj.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-set -e
-export OMP_NUM_THREADS=1
-cp ../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini injection_smbhb.ini
-sh ../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh
-sh ../../examples/inference/lisa_smbhb_inj/run.sh
-sh ../../examples/inference/lisa_smbhb_inj/plot.sh
diff --git a/docs/_include/inference_example_lisa_smbhb_ldc.sh b/docs/_include/inference_example_lisa_smbhb_ldc.sh
deleted file mode 100644
index 29f3a6c8e8b..00000000000
--- a/docs/_include/inference_example_lisa_smbhb_ldc.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-set -e
-export OMP_NUM_THREADS=1
-sh ../../examples/inference/lisa_smbhb_ldc/get.sh
-sh ../../examples/inference/lisa_smbhb_ldc/run.sh
-python ../../examples/inference/lisa_smbhb_ldc/advanced_plot.py
diff --git a/docs/building_bundled_executables.rst b/docs/building_bundled_executables.rst
index d8d014047c6..612711b4eed 100644
--- a/docs/building_bundled_executables.rst
+++ b/docs/building_bundled_executables.rst
@@ -96,7 +96,7 @@ The script executes ``pycbc_inspiral`` as part of the build process. This
 may require LAL data at build time. The LAL data can be given with the
 command line argument::
 
-    --with-lal-data-path=/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation
+    --with-lal-data-path=/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation
 
 The default command line arguments clone PyCBC from the standard GitHub
 repository. If you would like to build a bundle using code from your own
@@ -130,6 +130,6 @@ Building Releases for CVMFS
 To build a release of ``pycbc_inspiral`` for installation in CVMFS, run the
 script with the arguments::
 
-    pycbc_build_eah.sh --lalsuite-commit=a3a5a476d33f169b8749e2840c306a48df63c936 --pycbc-commit=b68832784969a47fe2658abffb3888ee06cd1be4 --with-extra-libs=file:///home/pycbc/build/composer_xe_2015.0.090.tar.gz --with-lal-data-path=/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation
+    pycbc_build_eah.sh --lalsuite-commit=a3a5a476d33f169b8749e2840c306a48df63c936 --pycbc-commit=b68832784969a47fe2658abffb3888ee06cd1be4 --with-extra-libs=file:///home/pycbc/build/composer_xe_2015.0.090.tar.gz --with-lal-data-path=/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation
 
 changing the ``--lalsuite-commit``, ``--pycbc-commit``, and
 ``--with-lal-data-path`` options to the values for the release.
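Editor's note (not part of the upstream patch): the new --gating-veto-windows option in pycbc_sngl_minifollowup above flags any trigger whose end time falls inside [gate + before, gate + after] around each gate time, delegating the interval test to pycbc.events.veto.indices_within_times. A toy, self-contained version of that test; the function name and all values are illustrative stand-ins, not the PyCBC implementation::

    import numpy as np

    def indices_within_windows(times, centres, before, after):
        # Return indices of `times` falling in any window [c + before, c + after];
        # `before` is negative and `after` positive, as --gating-veto-windows expects.
        starts = np.sort(centres + before)
        ends = np.sort(centres + after)
        # A time is inside a window when it has passed more window starts than ends
        inside = np.searchsorted(starts, times, side='right') \
            > np.searchsorted(ends, times, side='right')
        return np.flatnonzero(inside)

    trig_times = np.array([10.0, 99.5, 102.0, 300.0])  # made-up trigger end times
    gate_times = np.array([100.0])                     # a single gate at t = 100 s
    print(indices_within_windows(trig_times, gate_times, -1.0, 2.5))  # -> [1 2]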
diff --git a/docs/inference.rst b/docs/inference.rst
index 1ace8055679..41eb97a9b94 100644
--- a/docs/inference.rst
+++ b/docs/inference.rst
@@ -505,8 +505,6 @@ Examples
     inference/examples/single.rst
     inference/examples/relative.rst
     inference/examples/hierarchical.rst
-    inference/examples/lisa_smbhb_ldc_pe.rst
-    inference/examples/lisa_smbhb_inj_pe.rst
     inference/examples/sampler_platter.rst
 
     inference/models.rst
diff --git a/docs/inference/examples/lisa_smbhb_inj_pe.rst b/docs/inference/examples/lisa_smbhb_inj_pe.rst
deleted file mode 100644
index 7924261c8f9..00000000000
--- a/docs/inference/examples/lisa_smbhb_inj_pe.rst
+++ /dev/null
@@ -1,66 +0,0 @@
-.. _inference_example_lisa_smbhb_inj:
-
----------------------------------------------
-LISA SMBHB injection and parameter estimation
----------------------------------------------
-
-This example shows how to use PyCBC for time-domain LISA TDI noise generation
-and supermassive black hole binaries (SMBHB) signal injection. This one is
-similar to the :ref:`LISA parameter estimation for simulated SMBHB from LDC
-example <inference_example_lisa_smbhb_ldc>`; the main difference is that we
-generate our own mock data in this example. In order to do that, we use the
-`LISA TDI PSD module `_ to generate the stationary and Gaussian noise for
-each TDI channel in the time domain, then we use the
-`waveform injection module `_ to add the simulated signal into the simulated
-noise.
-
-First, we use the following configuration file to define the parameters of
-our SMBHB injection; we use the same parameters from the SMBHB signal in the
-:ref:`LISA parameter estimation for simulated SMBHB from LDC example
-<inference_example_lisa_smbhb_ldc>`:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini
-   :language: ini
-
-:download:`Download <../../../examples/inference/lisa_smbhb_inj/injection_smbhb.ini>`
-
-Then we run the following bash script to create a .hdf file that contains the
-same information:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh
-   :language: bash
-
-:download:`Download <../../../examples/inference/lisa_smbhb_inj/injection_smbhb.sh>`
-
-Here, we use a similar configuration file for parameter estimation; we also
-use the :py:class:`Relative <pycbc.inference.models.relbin.Relative>` model.
-We also just set chirp mass, mass ratio and tc as variable parameters; `tc`,
-`eclipticlongitude`, `eclipticlatitude` and `polarization` are defined in the
-LISA frame:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_inj/lisa_smbhb_relbin.ini
-   :language: ini
-
-:download:`Download <../../../examples/inference/lisa_smbhb_inj/lisa_smbhb_relbin.ini>`
-
-Now run:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_inj/run.sh
-   :language: bash
-
-:download:`Download <../../../examples/inference/lisa_smbhb_inj/run.sh>`
-
-To plot the posterior distribution after the last iteration, you can run the
-following script:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_inj/plot.sh
-   :language: bash
-
-:download:`Download <../../../examples/inference/lisa_smbhb_inj/plot.sh>`
-
-In this example it will create the following plot:
-
-.. image:: ../../_include/lisa_smbhb_mass_tc.png
-   :scale: 60
-   :align: center
-
-The scatter points show each walker's position after the last iteration. The
-points are colored by the SNR at that point, with the 50th and 90th
-percentile contours drawn. The red lines represent the true parameters of the
-injected signal.
diff --git a/docs/inference/examples/lisa_smbhb_ldc_pe.rst b/docs/inference/examples/lisa_smbhb_ldc_pe.rst
deleted file mode 100644
index 0e49929d0bc..00000000000
--- a/docs/inference/examples/lisa_smbhb_ldc_pe.rst
+++ /dev/null
@@ -1,70 +0,0 @@
-.. _inference_example_lisa_smbhb_ldc:
-
-------------------------------------------------------
-LISA parameter estimation for simulated SMBHB from LDC
-------------------------------------------------------
-
-This example shows how to use PyCBC for parameter estimation of supermassive
-black hole binaries (SMBHB) in LISA mock data. The `data `_ are generated
-from `LISA Data Challenge 2a: Sangria `_, and the `BBHx `_ package is used
-to generate the ``IMRPhenomD`` template and calculate the corresponding TDI
-response for LISA. Relative binning (heterodyned likelihood) is used during
-sampling to speed up the computation of likelihood functions. Before doing
-parameter estimation, you need to install `BBHx `_ and `the corresponding
-PyCBC waveform plugin `_; please click the corresponding link to see the
-detailed description of the installation.
-
-First, we create the following configuration file; here we just set chirp
-mass, mass ratio and tc as variable parameters; `tc`, `eclipticlongitude`,
-`eclipticlatitude` and `polarization` are defined in the LISA frame:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_ldc/lisa_smbhb_relbin.ini
-   :language: ini
-
-:download:`Download <../../../examples/inference/lisa_smbhb_ldc/lisa_smbhb_relbin.ini>`
-
-By setting the model name to ``relative`` we are using the
-:py:class:`Relative <pycbc.inference.models.relbin.Relative>` model.
-
-In this simple example, we do the parameter estimation for the first SMBHB
-signal in the LDC Sangria dataset (you can also run parameter estimation for
-other SMBHB signals by choosing an appropriate prior range); we need to
-download the data first (`MBHB_params_v2_LISA_frame.pkl` contains all the
-true parameters):
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_ldc/get.sh
-   :language: bash
-
-:download:`Download <../../../examples/inference/lisa_smbhb_ldc/get.sh>`
-
-Now run:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_ldc/run.sh
-   :language: bash
-
-:download:`Download <../../../examples/inference/lisa_smbhb_ldc/run.sh>`
-
-This will run the ``dynesty`` sampler. When it is done, you will have a file
-called ``lisa_smbhb.hdf`` which contains the results. It should take about
-three minutes to run.
-
-To plot the posterior distribution after the last iteration, you can run the
-following simplified script:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_ldc/plot.sh
-   :language: bash
-
-:download:`Download <../../../examples/inference/lisa_smbhb_ldc/plot.sh>`
-
-Or you can run the advanced one:
-
-.. literalinclude:: ../../../examples/inference/lisa_smbhb_ldc/advanced_plot.py
-   :language: python
-
-:download:`Download <../../../examples/inference/lisa_smbhb_ldc/advanced_plot.py>`
-
-You can modify this advanced plot script to generate the posterior of any
-SMBHB signal in the LDC Sangria dataset. In this example it will create the
-following plot:
-
-.. image:: ../../_include/lisa_smbhb_mass_tc_0.png
-   :scale: 60
-   :align: center
-
-The scatter points show each walker's position after the last iteration. The
-points are colored by the SNR at that point, with the 50th and 90th
-percentile contours drawn. The red lines represent the true parameters of the
-injected signal.
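Editor's note (not part of the upstream patch): the pycbc/events/stat.py and pycbc/io/hdf.py hunks further below change the rank_stat_single contract — callers now compute the single-detector statistic values first via rank_method.single(), and rank_stat_single() only ranks them (a pass-through for the quadsum/phasetd statistics, per the commit message). A toy mock of the corrected two-step call sequence; the class and trigger values are illustrative, not PyCBC code::

    import numpy as np

    class QuadsumLikeStat:
        # Mimics the fixed behaviour: single() maps raw trigger columns to the
        # sngl ranking; rank_stat_single() passes those values straight through.
        def single(self, trigs):
            return np.asarray(trigs['snr'])  # pretend the sngl ranking is SNR

        def rank_stat_single(self, single_info, **kwargs):
            return single_info[1]  # single_info is an (ifo, values) tuple

    rank_method = QuadsumLikeStat()
    trigs = {'snr': [5.0, 9.2, 7.1]}                  # made-up triggers
    sds = rank_method.single(trigs)                   # step 1: single values
    stat = rank_method.rank_stat_single(('H1', sds))  # step 2: rank them
    print(stat)                                       # -> [5.  9.2 7.1]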
diff --git a/docs/inference/models.rst b/docs/inference/models.rst
index d845476ec20..a3aa15746d2 100644
--- a/docs/inference/models.rst
+++ b/docs/inference/models.rst
@@ -133,8 +133,6 @@ Heterodyne / Relative Models
     to the models that need to generate a full waveform for every likelihood
     as these will usually be much faster.
 
-    There is also support in this model for use with :ref:`LISA Sangria data analysis <inference_example_lisa_smbhb_ldc>` and :ref:`LISA injection data analysis <inference_example_lisa_smbhb_inj>`.
-
     Supported Marginalizations: distance, coa_phase (dominant mode), polarization
     +++
     Earth Rotation:✅ LISA:✅ Higher Modes:❌
@@ -166,16 +164,6 @@ Heterodyne / Relative Models
     +++
     Earth Rotation:❌ LISA:❌ Higher Modes:❌
 
-.. card:: Brute force LISA sky modes
-
-    ``'brute_lisa_sky_modes_marginalize'`` :py:class:`pycbc.inference.models.relbin.Relative`
-
-    The model does a brute force marginalization over the LISA sky mode
-    degeneracies. It is built upon the `relative` model
-
-    Supported Marginalizations: distance, coa_phase (dominant mode)
-    +++
-    Earth Rotation:❌ LISA:✅ Higher Modes:❌
 
 =========================================
 Extrinsic Parameter Only Models
diff --git a/docs/install.rst b/docs/install.rst
index f5b85aee832..9f9c308be74 100644
--- a/docs/install.rst
+++ b/docs/install.rst
@@ -70,12 +70,12 @@ available in a 'IGWN Conda' environment. To see what environments are available
 
 This should yield ``igwn-py37`` as one choice. The output of this command
 will also tell you the location of the environment in the file system. Then,
 the location of the
-python3.7 executable is for instance ``/cvmfs/oasis.opensciencegrid.org/ligo/sw/conda/envs/igwn-py37/bin/python``
+python3.7 executable is for instance ``/cvmfs/software.igwn.org/conda/envs/igwn-py37/bin/python``
 and you will create the virtualenv via the command
 
 .. code-block:: bash
 
-    virtualenv -p /cvmfs/oasis.opensciencegrid.org/ligo/sw/conda/envs/igwn-py37/bin/python env
+    virtualenv -p /cvmfs/software.igwn.org/conda/envs/igwn-py37/bin/python env
 
 Once the virtualenv has been created you can install PyCBC from PyPI or
 a local copy with the `[igwn]` extra specifier to install the optional extra requirements
diff --git a/docs/install_lalsuite.rst b/docs/install_lalsuite.rst
index 438b3b358bb..52d7e340925 100644
--- a/docs/install_lalsuite.rst
+++ b/docs/install_lalsuite.rst
@@ -146,7 +146,7 @@ run the command
 
 .. code-block:: bash
 
-    echo 'export LAL_DATA_PATH=/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation' >> $VIRTUAL_ENV/bin/activate
+    echo 'export LAL_DATA_PATH=/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation' >> $VIRTUAL_ENV/bin/activate
 
 to add the appropriate path to your virtual environment's ``activate`` script.
 Then deactivate and activate your virtual environment.
diff --git a/docs/workflow/pycbc_make_offline_search_workflow.rst b/docs/workflow/pycbc_make_offline_search_workflow.rst
index e7694321d8a..7addf015000 100644
--- a/docs/workflow/pycbc_make_offline_search_workflow.rst
+++ b/docs/workflow/pycbc_make_offline_search_workflow.rst
@@ -987,8 +987,8 @@ locations in CVMFS. You will also need to specify where the code should get
 the data needed to generate reduced order model waveforms.
 To do this add the following additional arguments to ``pycbc_submit_dax``::
 
-  --append-site-profile 'local:env|LAL_DATA_PATH:/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation' \
-  --append-site-profile 'osg:env|LAL_DATA_PATH:/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation' \
+  --append-site-profile 'local:env|LAL_DATA_PATH:/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation' \
+  --append-site-profile 'osg:env|LAL_DATA_PATH:/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation' \
 
 Here, ``current`` is a symbolic link to the latest version of the data and
 can be replaced with a specific release (e.g. ``e02dab8c``) if required.
diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py
index 6966e2586ae..4151571e356 100644
--- a/pycbc/events/stat.py
+++ b/pycbc/events/stat.py
@@ -211,6 +211,9 @@ def rank_stat_single(self, single_info,
         """
         Calculate the statistic for a single detector candidate
 
+        For this statistic this is just passing through the
+        single value, which will be the second entry in the tuple.
+
         Parameters
         ----------
         single_info: tuple
@@ -222,7 +225,7 @@
         numpy.ndarray
             The array of single detector statistics
         """
-        return self.single(single_info[1])
+        return single_info[1]
 
     def rank_stat_coinc(self, sngls_list, slide, step, to_shift,
                         **kwargs):  # pylint:disable=unused-argument
@@ -663,6 +666,9 @@ def rank_stat_single(self, single_info,
         """
         Calculate the statistic for a single detector candidate
 
+        For this statistic this is just passing through the
+        single value, which will be the second entry in the tuple.
+
        Parameters
         ----------
         single_info: tuple
@@ -674,7 +680,7 @@
         numpy.ndarray
             The array of single detector statistics
         """
-        return self.single(single_info[1])
+        return single_info[1]
 
     def rank_stat_coinc(self, sngls_list, slide, step, to_shift,
                         **kwargs):  # pylint:disable=unused-argument
diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py
index edac3f26964..2cd0dd1e450 100644
--- a/pycbc/io/hdf.py
+++ b/pycbc/io/hdf.py
@@ -517,7 +517,8 @@ def mask_to_n_loudest_clustered_events(self, rank_method,
            be considered."""
 
         # If this becomes memory intensive we can optimize
-        stat = rank_method.rank_stat_single((self.ifo, self.trig_dict()))
+        sds = rank_method.single(self.trig_dict())
+        stat = rank_method.rank_stat_single((self.ifo, sds))
         if len(stat) == 0:
             # No triggers, so just return here
             self.stat = np.array([])
diff --git a/pycbc/workflow/minifollowups.py b/pycbc/workflow/minifollowups.py
index 1f16c714e71..5e59533da5d 100644
--- a/pycbc/workflow/minifollowups.py
+++ b/pycbc/workflow/minifollowups.py
@@ -125,7 +125,8 @@ def setup_foreground_minifollowups(workflow, coinc_file, single_triggers,
 def setup_single_det_minifollowups(workflow, single_trig_file, tmpltbank_file,
                                    insp_segs, insp_data_name, insp_anal_name,
                                    dax_output, out_dir, veto_file=None,
-                                   veto_segment_name=None, statfiles=None,
+                                   veto_segment_name=None, fg_file=None,
+                                   fg_name=None, statfiles=None,
                                    tags=None):
     """ Create plots that followup the Nth loudest clustered single detector
     triggers from a merged single detector trigger HDF file.
@@ -192,8 +193,11 @@ def setup_single_det_minifollowups(workflow, single_trig_file, tmpltbank_file,
         assert(veto_segment_name is not None)
         node.add_input_opt('--veto-file', veto_file)
         node.add_opt('--veto-segment-name', veto_segment_name)
+    if fg_file is not None:
+        assert(fg_name is not None)
+        node.add_input_opt('--foreground-censor-file', fg_file)
+        node.add_opt('--foreground-segment-name', fg_name)
     if statfiles:
-        statfiles = statfiles.find_output_with_ifo(curr_ifo)
         node.add_input_list_opt('--statistic-files', statfiles)
     if tags:
         node.add_list_opt('--tags', tags)
@@ -555,7 +559,7 @@ def make_coinc_info(workflow, singles, bank, coinc, out_dir,
     return files
 
 def make_sngl_ifo(workflow, sngl_file, bank_file, trigger_id, out_dir, ifo,
-                  title=None, tags=None):
+                  statfiles=None, title=None, tags=None):
     """Setup a job to create sngl detector sngl ifo html summary snippet.
     """
     tags = [] if tags is None else tags
@@ -568,6 +572,8 @@ def make_sngl_ifo(workflow, sngl_file, bank_file, trigger_id, out_dir, ifo,
     node.add_input_opt('--bank-file', bank_file)
     node.add_opt('--trigger-id', str(trigger_id))
     node.add_opt('--instrument', ifo)
+    if statfiles is not None:
+        node.add_input_list_opt('--statistic-files', statfiles)
     if title is not None:
         node.add_opt('--title', f'"{title}"')
     node.new_output_file_opt(workflow.analysis_time, '.html', '--output-file')
diff --git a/setup.py b/setup.py
index eee746d3da2..35be1165e91 100755
--- a/setup.py
+++ b/setup.py
@@ -119,7 +119,7 @@ def __getattr__(self, attr):
     vinfo = _version_helper.generate_git_version_info()
 except:
     vinfo = vdummy()
-    vinfo.version = '2.3.2'
+    vinfo.version = '2.3.3'
     vinfo.release = 'True'
 
 version_script = f"""# coding: utf-8
diff --git a/tools/cvmfs-default.local b/tools/cvmfs-default.local
index 6b11c123d76..04bc26c1e55 100644
--- a/tools/cvmfs-default.local
+++ b/tools/cvmfs-default.local
@@ -1,5 +1,5 @@
 CVMFS_USER=cvmfs
-CVMFS_REPOSITORIES=config-osg.opensciencegrid.org,oasis.opensciencegrid.org,gwosc.osgstorage.org
+CVMFS_REPOSITORIES=config-osg.opensciencegrid.org,software.igwn.org,gwosc.osgstorage.org
 CVMFS_QUOTA_LIMIT=5000
 CVMFS_HTTP_PROXY=DIRECT
 CVMFS_MAX_RETRIES=10
diff --git a/tox.ini b/tox.ini
index e41f67686d2..5663066ecbf 100644
--- a/tox.ini
+++ b/tox.ini
@@ -52,9 +52,6 @@ commands = bash tools/pycbc_test_suite.sh
 [testenv:py-docs]
 deps =
     {[base]deps}
-    ; Needed for `BBHx` package to work with PyCBC
-    git+https://github.com/mikekatz04/BBHx.git; sys_platform == 'linux'
-    git+https://github.com/ConWea/BBHX-waveform-model.git; sys_platform == 'linux'
 conda_deps=
     mysqlclient
     gcc_linux-64>=12.2.0
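Editor's note (not part of the upstream patch): the reworked pycbc_sngl_minifollowup earlier in this patch ranks every surviving trigger, clusters over a time window with pycbc.events.coinc.cluster_over_time, and then keeps the num-sngl-events loudest survivors. A toy sketch of that cluster-then-select logic; cluster_over_time_demo and all values are illustrative stand-ins, not the PyCBC function::

    import numpy as np

    def cluster_over_time_demo(stat, times, window):
        # Keep the loudest trigger within each `window`-second neighbourhood:
        # walk triggers loudest-first and drop any within `window` of a keeper.
        keep = []
        for i in np.argsort(stat)[::-1]:
            if all(abs(times[i] - times[j]) > window for j in keep):
                keep.append(i)
        return np.array(sorted(keep))

    stat = np.array([8.0, 9.5, 7.0, 12.0])          # made-up ranking values
    times = np.array([100.0, 103.0, 250.0, 251.0])  # trigger end times (s)
    cid = cluster_over_time_demo(stat, times, window=10)
    print(cid)                                      # -> [1 3], one per cluster

    # As in the updated script: keep the num_events loudest clustered triggers
    num_events = 2
    loudest_idx = sorted(np.argsort(stat[cid])[::-1][:num_events])
    print(cid[loudest_idx])                         # -> [1 3]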