From 6e43ce0bf476a3610941a19282899c4c8c157b3d Mon Sep 17 00:00:00 2001
From: Alex Correia
Date: Tue, 27 Aug 2024 15:07:01 -0400
Subject: [PATCH] Revert "Merge branch 'gwastro:master' into lisa-emri-pe"

---
 .github/workflows/basic-tests.yml             |   4 +-
 .github/workflows/distribution.yml            |   8 +-
 .github/workflows/inference-workflow.yml      |   4 +-
 .github/workflows/mac-test.yml                |   3 +-
 .github/workflows/search-workflow.yml         |   4 +-
 .github/workflows/tmpltbank-workflow.yml      |   4 +-
 .github/workflows/tut-test.yml                |   2 +-
 .github/workflows/workflow-tests.yml          |   4 +-
 .gitignore                                    |  14 +-
 bin/all_sky_search/pycbc_add_statmap          |   2 +
 bin/all_sky_search/pycbc_apply_rerank         |   2 +
 bin/all_sky_search/pycbc_average_psd          |   2 +
 bin/all_sky_search/pycbc_bin_templates        |   2 +
 bin/all_sky_search/pycbc_bin_trigger_rates_dq |   2 +
 bin/all_sky_search/pycbc_calculate_psd        |   2 +
 bin/all_sky_search/pycbc_coinc_findtrigs      |   3 +-
 bin/all_sky_search/pycbc_coinc_hdfinjfind     |   4 +-
 bin/all_sky_search/pycbc_coinc_mergetrigs     |   7 +-
 bin/all_sky_search/pycbc_coinc_statmap        |   4 +-
 bin/all_sky_search/pycbc_coinc_statmap_inj    |   3 +
 .../pycbc_combine_coincident_events           |   2 +
 bin/all_sky_search/pycbc_combine_statmap      |   2 +
 .../pycbc_cut_merge_triggers_to_tmpltbank     |   7 +-
 .../pycbc_distribute_background_bins          |   2 +
 bin/all_sky_search/pycbc_exclude_zerolag      |   3 +
 bin/all_sky_search/pycbc_fit_sngls_binned     |   3 +-
 .../pycbc_fit_sngls_by_template               |   6 +-
 .../pycbc_fit_sngls_over_multiparam           |  15 +-
 bin/all_sky_search/pycbc_fit_sngls_over_param |   3 +-
 bin/all_sky_search/pycbc_followup_file        |   2 +
 bin/all_sky_search/pycbc_foreground_censor    |   3 +-
 bin/all_sky_search/pycbc_make_bayestar_skymap |   4 +-
 bin/all_sky_search/pycbc_merge_psds           |   2 +
 bin/all_sky_search/pycbc_reduce_template_bank |   7 +-
 bin/all_sky_search/pycbc_rerank_passthrough   |   2 +
 bin/all_sky_search/pycbc_sngls_findtrigs      |   5 +-
 bin/all_sky_search/pycbc_sngls_pastro         |   3 +
 bin/all_sky_search/pycbc_sngls_statmap        |   4 +-
 bin/all_sky_search/pycbc_sngls_statmap_inj    |   4 +-
 bin/all_sky_search/pycbc_strip_injections     |   3 +-
 bin/bank/pycbc_aligned_bank_cat               |   2 +
 bin/bank/pycbc_aligned_stoch_bank             |   6 +
 bin/bank/pycbc_bank_verification              |   1 +
 bin/bank/pycbc_brute_bank                     |   1 -
 bin/bank/pycbc_coinc_bank2hdf                 |   2 +
 bin/bank/pycbc_geom_aligned_2dstack           |   1 +
 bin/bank/pycbc_geom_aligned_bank              |   1 +
 bin/bank/pycbc_geom_nonspinbank               |   1 +
 bin/bank/pycbc_tmpltbank_to_chi_params        |   1 +
 bin/inference/pycbc_inference                 |   3 +
 .../pycbc_inference_plot_acceptance_rate      |   4 +
 bin/inference/pycbc_inference_plot_acf        |   3 +
 bin/inference/pycbc_inference_plot_acl        |   3 +
 .../pycbc_inference_plot_dynesty_run          |   3 +
 .../pycbc_inference_plot_dynesty_traceplot    |   3 +
 .../pycbc_inference_plot_gelman_rubin         |   4 +-
 bin/inference/pycbc_inference_plot_geweke     |   4 +
 .../pycbc_inference_plot_inj_recovery         |   4 +
 bin/inference/pycbc_inference_plot_movie      |   3 +
 bin/inference/pycbc_inference_plot_posterior  |   4 +
 bin/inference/pycbc_inference_plot_pp         |   4 +
 bin/inference/pycbc_inference_plot_prior      |   3 +
 bin/inference/pycbc_inference_plot_samples    |   3 +
 .../pycbc_live_plot_single_significance_fits  |   4 +-
 bin/live/pycbc_live_single_significance_fits  |   7 +
 ...pycbc_live_supervise_collated_trigger_fits | 650 ------------------
 ...bc_live_supervise_single_significance_fits | 517 ++++++++++++++
 .../pycbc_foreground_minifollowup             |   2 +
 .../pycbc_injection_minifollowup              |   2 +
 bin/minifollowups/pycbc_page_coincinfo        |  59 +-
 bin/minifollowups/pycbc_page_injinfo          |  45 +-
 bin/minifollowups/pycbc_page_snglinfo         |  38 +-
 .../pycbc_plot_trigger_timeseries             |   3 +
 bin/minifollowups/pycbc_single_template_plot  |   2 +
 bin/minifollowups/pycbc_sngl_minifollowup     |  59 +-
 .../pycbc_upload_prep_minifollowup            |   2 +
 .../pycbc_banksim_plot_eff_fitting_factor     |   1 +
 .../pycbc_banksim_plot_fitting_factors        |   1 +
 bin/plotting/pycbc_banksim_table_point_injs   |   1 +
 bin/plotting/pycbc_create_html_snippet        |   1 +
 bin/plotting/pycbc_ifar_catalog               |   3 +
 bin/plotting/pycbc_page_coinc_snrchi          |   3 +
 bin/plotting/pycbc_page_dq_table              |   2 +
 bin/plotting/pycbc_page_foreground            |   3 +
 bin/plotting/pycbc_page_foundmissed           |   4 +-
 bin/plotting/pycbc_page_ifar                  |   3 +
 bin/plotting/pycbc_page_injtable              |   3 +
 bin/plotting/pycbc_page_recovery              |   4 +-
 bin/plotting/pycbc_page_segments              |   3 +
 bin/plotting/pycbc_page_segplot               |   4 +-
 bin/plotting/pycbc_page_segtable              |   3 +
 bin/plotting/pycbc_page_sensitivity           |   3 +
 bin/plotting/pycbc_page_snrchi                |   3 +
 bin/plotting/pycbc_page_snrifar               |   3 +
 bin/plotting/pycbc_page_snrratehist           |   3 +
 bin/plotting/pycbc_page_template_bin_table    |   2 +
 bin/plotting/pycbc_page_versioning            |   4 +-
 bin/plotting/pycbc_page_vetotable             |   2 +
 bin/plotting/pycbc_plot_bank_bins             |   3 +
 bin/plotting/pycbc_plot_bank_corner           |  63 +-
 bin/plotting/pycbc_plot_dq_flag_likelihood    |   2 +
 bin/plotting/pycbc_plot_dq_likelihood_vs_time |   2 +
 bin/plotting/pycbc_plot_dq_percentiles        |   2 +
 bin/plotting/pycbc_plot_gating                |   4 +-
 bin/plotting/pycbc_plot_hist                  |   2 +
 bin/plotting/pycbc_plot_multiifo_dtphase      |   4 +-
 bin/plotting/pycbc_plot_psd_file              |   3 +
 bin/plotting/pycbc_plot_qscan                 |   3 +
 bin/plotting/pycbc_plot_range                 |   3 +
 bin/plotting/pycbc_plot_range_vs_mtot         |   3 +
 bin/plotting/pycbc_plot_singles_timefreq      |   3 +
 bin/plotting/pycbc_plot_singles_vs_params     |   3 +
 bin/plotting/pycbc_plot_throughput            |   3 +
 bin/plotting/pycbc_plot_trigrate              |   2 +
 bin/plotting/pycbc_plot_waveform              |   3 +
 bin/pycbc_banksim                             |   4 +-
 bin/pycbc_banksim_combine_banks               |   2 +
 bin/pycbc_banksim_match_combine               |   6 +-
 bin/pycbc_banksim_skymax                      |   4 +-
 bin/pycbc_coinc_time                          | 187 +++
 bin/pycbc_condition_strain                    |   3 +
 bin/pycbc_convertinjfiletohdf                 |   2 +
 bin/pycbc_create_injections                   |   4 +
 bin/pycbc_data_store                          |   2 +
 bin/pycbc_faithsim                            |   3 +
 bin/pycbc_fit_sngl_trigs                      |   3 +-
 bin/pycbc_hdf5_splitbank                      |   4 +-
 bin/pycbc_hdf_splitinj                        |   4 +-
 bin/pycbc_inj_cut                             |   2 +
 bin/pycbc_inspiral                            |   2 +
 bin/pycbc_live                                | 139 ++--
 bin/pycbc_make_html_page                      |   3 +
 bin/pycbc_make_skymap                         |   1 +
 bin/pycbc_merge_inj_hdf                       |   2 +
 bin/pycbc_multi_inspiral                      |   2 +
 bin/pycbc_optimal_snr                         |   2 +
 bin/pycbc_optimize_snr                        |   4 +-
 bin/pycbc_single_template                     |   2 +
 bin/pycbc_source_probability_offline          |   2 +
 bin/pycbc_split_inspinj                       |   5 +-
 bin/pycbc_splitbank                           |   3 +
 bin/pygrb/pycbc_grb_inj_finder                |  10 +-
 bin/pygrb/pycbc_grb_trig_cluster              | 204 +++---
 bin/pygrb/pycbc_grb_trig_combiner             |   9 +-
 bin/pygrb/pycbc_make_offline_grb_workflow     |   8 +
 bin/pygrb/pycbc_pygrb_efficiency              |   9 +-
 bin/pygrb/pycbc_pygrb_exclusion_dist_table    |   4 +-
 bin/pygrb/pycbc_pygrb_grb_info_table          |   1 +
 bin/pygrb/pycbc_pygrb_minifollowups           |   2 +
 bin/pygrb/pycbc_pygrb_page_tables             |   3 +-
 bin/pygrb/pycbc_pygrb_plot_chisq_veto         |  19 +-
 bin/pygrb/pycbc_pygrb_plot_coh_ifosnr         |  14 +-
 bin/pygrb/pycbc_pygrb_plot_injs_results       |   3 +-
 bin/pygrb/pycbc_pygrb_plot_null_stats         |  19 +-
 bin/pygrb/pycbc_pygrb_plot_skygrid            |   3 +-
 bin/pygrb/pycbc_pygrb_plot_snr_timeseries     |  24 +-
 bin/pygrb/pycbc_pygrb_plot_stats_distribution |   3 +-
 bin/pygrb/pycbc_pygrb_pp_workflow             |   1 +
 .../pycbc_make_bank_verifier_workflow         |   1 +
 bin/workflows/pycbc_make_faithsim_workflow    |   4 +
 .../pycbc_make_inference_inj_workflow         |   5 +
 .../pycbc_make_inference_plots_workflow       |   3 +
 bin/workflows/pycbc_make_inference_workflow   |   4 +
 .../pycbc_make_offline_search_workflow        |   5 +
 .../pycbc_make_psd_estimation_workflow        |   3 +
 bin/workflows/pycbc_make_sbank_workflow       |   8 +
 bin/workflows/pycbc_make_uberbank_workflow    |   1 +
 examples/inference/margtime/margtime.ini      |   5 +-
 examples/inference/margtime/run.sh            |   3 +-
 examples/search/analysis.ini                  |   3 -
 examples/search/plotting.ini                  |   2 -
 pycbc/__init__.py                             |  49 +-
 pycbc/_version.py                             |  96 +--
 pycbc/events/coinc.py                         |  84 +--
 pycbc/events/single.py                        | 101 +--
 pycbc/events/stat.py                          | 374 ++--
 pycbc/frame/frame.py                          |  35 +-
 .../models/marginalized_gaussian_noise.py     |  43 +-
 pycbc/io/hdf.py                               |  60 +-
 pycbc/live/__init__.py                        |   1 -
 pycbc/live/supervision.py                     | 154 ----
 pycbc/pool.py                                 |  14 +-
 pycbc/results/dq.py                           |   6 +-
 pycbc/results/pygrb_postprocessing_utils.py   |  45 +-
 pycbc/results/scatter_histograms.py           |  35 +-
 pycbc/results/static/css/pycbc/orange.css     |  14 -
 pycbc/results/table_utils.py                  |  96 +--
 pycbc/types/config.py                         |   2 +-
 pycbc/types/timeseries.py                     |  31 +-
 pyproject.toml                                |  14 +-
 requirements-igwn.txt                         |   2 +-
 requirements.txt                              |  12 +-
 setup.py                                      | 107 ++-
 test/test_live_coinc_compare.py               |   3 +-
 test/test_timeseries.py                       |  13 +-
 tools/docker_build_dist.sh                    |   1 +
 tools/pycbc_test_suite.sh                     | 181 +++-
 tools/static/cant_be_built                    |   1 +
 tools/static/needs_full_build                 |   1 +
 tox.ini                                       |  19 +-
 200 files changed, 1803 insertions(+), 2354 deletions(-)
 mode change 100755 => 100644 bin/bank/pycbc_brute_bank
 delete mode 100755 bin/live/pycbc_live_supervise_collated_trigger_fits
 create mode 100755 bin/live/pycbc_live_supervise_single_significance_fits
 create mode 100644 bin/pycbc_coinc_time
 delete mode 100644 pycbc/live/supervision.py

diff --git a/.github/workflows/basic-tests.yml b/.github/workflows/basic-tests.yml
index ab22c715793..d962026e09a 100644
--- a/.github/workflows/basic-tests.yml
+++ b/.github/workflows/basic-tests.yml
@@ -13,7 +13,7 @@ jobs:
       max-parallel: 60
       matrix:
         os: [ubuntu-20.04]
-        python-version: ['3.9', '3.10', '3.11', '3.12']
+        python-version: [3.8, 3.9, '3.10', '3.11']
         test-type: [unittest, search, docs]
     steps:
     - uses: actions/checkout@v3
@@ -50,7 +50,7 @@ jobs:
         export LAL_DATA_PATH=$PWD
         tox -e py-inference
     - name: store documentation page
-      if: matrix.test-type == 'docs' && matrix.python-version == '3.12'
+      if: matrix.test-type == 'docs' && matrix.python-version == '3.8'
       uses: actions/upload-artifact@v2
       with:
         name: documentation-page
diff --git a/.github/workflows/distribution.yml b/.github/workflows/distribution.yml
index 1b54b501a3d..122f01ae03d 100644
--- a/.github/workflows/distribution.yml
+++ b/.github/workflows/distribution.yml
@@ -20,13 +20,13 @@ jobs:
         fetch-depth: 0
     - uses: actions/setup-python@v4
       with:
-        python-version: '3.10'
+        python-version: 3.8
    - name: Install cibuildwheel
      run: python -m pip install cibuildwheel
    - name: Build wheels
      run: python -m cibuildwheel --output-dir wheelhouse
      env:
-        CIBW_BUILD: cp39-* cp310-* cp311-* cp312-*
+        CIBW_BUILD: cp38-* cp39-* cp310-* cp311-*
        CIBW_SKIP: "*musllinux*"
        CIBW_ARCHS_MACOS: x86_64 arm64
    - uses: actions/upload-artifact@v2
@@ -40,10 +40,10 @@ jobs:
    - uses: actions/checkout@v3
      with:
        fetch-depth: 0
-    - name: Set up Python 3.10
+    - name: Set up Python 3.8
      uses: actions/setup-python@v4
      with:
-        python-version: '3.10'
+        python-version: 3.8
    - uses: actions/download-artifact@v2
      with:
        path: ./
diff --git a/.github/workflows/inference-workflow.yml b/.github/workflows/inference-workflow.yml
index 704351cdb7e..d95e1c5b714 100644
--- a/.github/workflows/inference-workflow.yml
+++ b/.github/workflows/inference-workflow.yml
@@ -10,7 +10,7 @@ jobs:
    - name: Set up Python
      uses: actions/setup-python@v4
      with:
-        python-version: '3.10'
+        python-version: 3.8
    - name: install condor
      run: |
        wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add -
@@ -25,7 +25,7 @@ jobs:
        wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
        echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
        sudo apt-get -o Acquire::Retries=3 update
-        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
+        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
    - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
    - name: Install pycbc
      run: |
diff --git a/.github/workflows/mac-test.yml b/.github/workflows/mac-test.yml
index 7501eb9d9b5..f441f648d5c 100644
--- a/.github/workflows/mac-test.yml
+++ b/.github/workflows/mac-test.yml
@@ -13,7 +13,7 @@ jobs:
      max-parallel: 4
      matrix:
        os: [macos-12]
-        python-version: ['3.10', '3.11', '3.12']
+        python-version: [3.8, 3.9, '3.10', '3.11']
    steps:
    - uses: actions/checkout@v1
    - name: Set up Python ${{ matrix.python-version }}
@@ -25,4 +25,5 @@ jobs:
        pip install --upgrade pip setuptools "tox<4.0.0"
    - name: run basic pycbc test suite
      run: |
+        sudo chmod -R 777 /usr/local/miniconda/
        tox -e py-unittest
diff --git a/.github/workflows/search-workflow.yml b/.github/workflows/search-workflow.yml
index d3cd392ebb9..9383c5066cb 100644
--- a/.github/workflows/search-workflow.yml
+++ b/.github/workflows/search-workflow.yml
@@ -15,7 +15,7 @@ jobs:
    - name: Set up Python
      uses: actions/setup-python@v4
      with:
-        python-version: '3.10'
+        python-version: 3.8
    - name: install condor
      run: |
        wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add -
@@ -30,7 +30,7 @@ jobs:
        wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
        echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
        sudo apt-get -o Acquire::Retries=3 update
-        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
+        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
    - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
    - name: Install pycbc
      run: |
diff --git a/.github/workflows/tmpltbank-workflow.yml b/.github/workflows/tmpltbank-workflow.yml
index c460bcc0637..66a4f1ba3b2 100644
--- a/.github/workflows/tmpltbank-workflow.yml
+++ b/.github/workflows/tmpltbank-workflow.yml
@@ -14,7 +14,7 @@ jobs:
    - name: Set up Python
      uses: actions/setup-python@v4
      with:
-        python-version: '3.10'
+        python-version: 3.8
    - name: install condor
      run: |
        wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add -
@@ -29,7 +29,7 @@ jobs:
        wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
        echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
        sudo apt-get -o Acquire::Retries=3 update
-        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
+        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
    - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
    - name: Install pycbc
      run: |
diff --git a/.github/workflows/tut-test.yml b/.github/workflows/tut-test.yml
index 11928d632f6..5abd59c9d75 100644
--- a/.github/workflows/tut-test.yml
+++ b/.github/workflows/tut-test.yml
@@ -13,7 +13,7 @@ jobs:
      max-parallel: 60
      matrix:
        os: [ubuntu-20.04]
-        python-version: ['3.10', '3.11', '3.12']
+        python-version: [3.8, 3.9, '3.10', '3.11']
    steps:
    - uses: actions/checkout@v3
    - name: Set up Python ${{ matrix.python-version }}
diff --git a/.github/workflows/workflow-tests.yml b/.github/workflows/workflow-tests.yml
index 1b49d7427a5..86410066aff 100644
--- a/.github/workflows/workflow-tests.yml
+++ b/.github/workflows/workflow-tests.yml
@@ -19,7 +19,7 @@ jobs:
    - name: Set up Python
      uses: actions/setup-python@v4
      with:
-        python-version: '3.10'
+        python-version: 3.8
    - name: install condor
      run: |
        wget -qO - https://research.cs.wisc.edu/htcondor/ubuntu/HTCondor-Release.gpg.key | sudo apt-key add -
@@ -34,7 +34,7 @@ jobs:
        wget -qO - https://download.pegasus.isi.edu/pegasus/gpg.txt | sudo apt-key add -
        echo "deb https://download.pegasus.isi.edu/pegasus/ubuntu bionic main" | sudo tee -a /etc/apt/sources.list
        sudo apt-get -o Acquire::Retries=3 update
-        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.8-1+ubuntu18
+        sudo apt-get -o Acquire::Retries=3 install pegasus=5.0.6-1+ubuntu18
    - run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
    - name: Install pycbc
      run: |
diff --git a/.gitignore b/.gitignore
index 405cf23fa4a..be6fd9999fb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,18 +4,8 @@
 *.log
 dist/
 html/
+pycbc_inspiralc
 build/
 *.pyc
 docs/Makefile
-PyCBC.egg-info/
-pycbc/events/eventmgr_cython.cpp
-pycbc/events/simd_threshold_cython.cpp
-pycbc/fft/fftw_pruned_cython.cpp
-pycbc/filter/matchedfilter_cpu.cpp
-pycbc/filter/simd_correlate_cython.cpp
-pycbc/inference/models/relbin_cpu.cpp
-pycbc/types/array_cpu.cpp
-pycbc/vetoes/chisq_cpu.cpp
-pycbc/waveform/decompress_cpu_cython.cpp
-pycbc/waveform/spa_tmplt_cpu.cpp
-pycbc/waveform/utils_cpu.cpp
+PyCBC.egg-info
diff --git a/bin/all_sky_search/pycbc_add_statmap b/bin/all_sky_search/pycbc_add_statmap
index 65b8cff843a..066fd12d1d7 100755
--- a/bin/all_sky_search/pycbc_add_statmap
+++ b/bin/all_sky_search/pycbc_add_statmap
@@ -26,6 +26,8 @@ def get_ifo_string(fi):
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action="version",
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--statmap-files', nargs='+',
                     help="List of coinc files to be combined")
 parser.add_argument('--background-files', nargs='+', default=None,
diff --git a/bin/all_sky_search/pycbc_apply_rerank b/bin/all_sky_search/pycbc_apply_rerank
index f90ed7f7b8e..68e1126c967 100644
--- a/bin/all_sky_search/pycbc_apply_rerank
+++ b/bin/all_sky_search/pycbc_apply_rerank
@@ -10,6 +10,8 @@ from shutil import copyfile
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--stat-files', nargs='+',
                     help="Statistic files produced by candidate followup codes")
 parser.add_argument('--followup-file',
diff --git a/bin/all_sky_search/pycbc_average_psd b/bin/all_sky_search/pycbc_average_psd
index 0f34ae2d456..b89aaff7876 100644
--- a/bin/all_sky_search/pycbc_average_psd
+++ b/bin/all_sky_search/pycbc_average_psd
@@ -26,11 +26,13 @@ import argparse
 import numpy as np
 import pycbc
 from pycbc.io import HFile
+from pycbc.version import git_verbose_msg as version
 from pycbc.types import MultiDetOptionAction, FrequencySeries
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=version)
 parser.add_argument('--input-files', nargs='+', required=True,
                     metavar='PATH',
                     help='HDF5 files from pycbc_calculate_psd (one per '
                          'detector) containing the input PSDs to average.')
diff --git a/bin/all_sky_search/pycbc_bin_templates b/bin/all_sky_search/pycbc_bin_templates
index 807c234adbe..4cc9558ec1a 100755
--- a/bin/all_sky_search/pycbc_bin_templates
+++ b/bin/all_sky_search/pycbc_bin_templates
@@ -8,10 +8,12 @@ import numpy as np
 
 import pycbc
 import pycbc.pnutils
+from pycbc.version import git_verbose_msg as version
 from pycbc.events import background_bin_from_string
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=version)
 parser.add_argument("--ifo", type=str, required=True)
 parser.add_argument("--f-lower", type=float, default=15.,
                     help='Enforce a uniform low frequency cutoff to '
diff --git a/bin/all_sky_search/pycbc_bin_trigger_rates_dq b/bin/all_sky_search/pycbc_bin_trigger_rates_dq
index f1c6f0ed7fb..ef069d9a6b1 100644
--- a/bin/all_sky_search/pycbc_bin_trigger_rates_dq
+++ b/bin/all_sky_search/pycbc_bin_trigger_rates_dq
@@ -16,9 +16,11 @@ from pycbc.events.veto import (select_segments_by_definer,
                                segments_to_start_end)
 from pycbc.types.optparse import MultiDetOptionAction
 from pycbc.io.hdf import SingleDetTriggers
+from pycbc.version import git_verbose_msg as version
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=version)
 parser.add_argument("--template-bins-file", required=True)
 parser.add_argument("--trig-file", required=True)
 parser.add_argument("--flag-file", required=True)
diff --git a/bin/all_sky_search/pycbc_calculate_psd b/bin/all_sky_search/pycbc_calculate_psd
index 9764250fc9f..38ea79fddeb 100755
--- a/bin/all_sky_search/pycbc_calculate_psd
+++ b/bin/all_sky_search/pycbc_calculate_psd
@@ -5,6 +5,7 @@ import logging, argparse, numpy, multiprocessing, time, copy
 from six.moves import zip_longest
 import pycbc, pycbc.psd, pycbc.strain, pycbc.events
 from pycbc.io import HFile
+from pycbc.version import git_verbose_msg as version
 from pycbc.fft.fftw import set_measure_level
 from pycbc.workflow import resolve_td_option
 from ligo.segments import segmentlist, segment
@@ -12,6 +13,7 @@ set_measure_level(0)
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=version)
 parser.add_argument("--low-frequency-cutoff", type=float, required=True,
                     help="The low frequency cutoff to use for filtering (Hz)")
 parser.add_argument("--analysis-segment-file", required=True,
diff --git a/bin/all_sky_search/pycbc_coinc_findtrigs b/bin/all_sky_search/pycbc_coinc_findtrigs
index 35628648db0..3042df7cb65 100644
--- a/bin/all_sky_search/pycbc_coinc_findtrigs
+++ b/bin/all_sky_search/pycbc_coinc_findtrigs
@@ -2,8 +2,8 @@ import copy, argparse, logging, numpy, numpy.random
 import shutil, uuid, os.path, atexit
 from ligo.segments import infinity
-import pycbc
 from pycbc.events import veto, coinc, stat, ranking, cuts
+import pycbc.version
 from pycbc.io import HFile
 from pycbc import pool, init_logging
 from numpy.random import seed, shuffle
@@ -12,6 +12,7 @@ from pycbc.types.optparse import MultiDetOptionAction
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg)
parser.add_argument("--veto-files", nargs='*', action='append', default=[], help="Optional veto file. Triggers within veto segments " "contained in the file are ignored") diff --git a/bin/all_sky_search/pycbc_coinc_hdfinjfind b/bin/all_sky_search/pycbc_coinc_hdfinjfind index 4a4f3229e8f..9fc1bd3b858 100755 --- a/bin/all_sky_search/pycbc_coinc_hdfinjfind +++ b/bin/all_sky_search/pycbc_coinc_hdfinjfind @@ -6,12 +6,12 @@ files. import argparse, logging, types, numpy, os.path from ligo.lw import lsctables, utils as ligolw_utils from ligo import segments -import pycbc from pycbc import events, init_logging from pycbc.events import indices_within_segments from pycbc.types import MultiDetOptionAction from pycbc.inject import CBCHDFInjectionSet from pycbc.io import HFile +import pycbc.version from pycbc.io.ligolw import LIGOLWContentHandler @@ -57,6 +57,8 @@ def xml_to_hdf(table, hdf_file, hdf_key, columns): parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-files', nargs='+', required=True) parser.add_argument('--injection-files', nargs='+', required=True) parser.add_argument('--veto-file') diff --git a/bin/all_sky_search/pycbc_coinc_mergetrigs b/bin/all_sky_search/pycbc_coinc_mergetrigs index 77637b85b6d..cf56a23f7c9 100755 --- a/bin/all_sky_search/pycbc_coinc_mergetrigs +++ b/bin/all_sky_search/pycbc_coinc_mergetrigs @@ -4,8 +4,9 @@ """ import numpy, argparse, h5py, logging +import pycbc.version from pycbc.io import HFile -from pycbc import add_common_pycbc_options, init_logging +from pycbc import init_logging def changes(arr): l = numpy.where(arr[:-1] != arr[1:])[0] @@ -30,7 +31,9 @@ def region(f, key, boundaries, ids): dtype=h5py.special_dtype(ref=h5py.RegionReference)) parser = argparse.ArgumentParser() -add_common_pycbc_options(parser) +pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-files', nargs='+') parser.add_argument('--output-file', required=True) parser.add_argument('--bank-file', required=True) diff --git a/bin/all_sky_search/pycbc_coinc_statmap b/bin/all_sky_search/pycbc_coinc_statmap index 8bdbca8bd0f..e4915391750 100755 --- a/bin/all_sky_search/pycbc_coinc_statmap +++ b/bin/all_sky_search/pycbc_coinc_statmap @@ -10,7 +10,7 @@ the FANs of any other gravitational waves in the dataset. 
 import argparse, itertools
 import lal, logging, numpy
 from pycbc.events import veto, coinc, significance
-import pycbc.pnutils, pycbc.io
+import pycbc.version, pycbc.pnutils, pycbc.io
 import sys
 import pycbc.conversions as conv
 
@@ -35,6 +35,8 @@ class fw(object):
 parser = argparse.ArgumentParser()
 # General required options
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--coinc-files', nargs='+',
                     help='List of coincidence files used to calculate the '
                          'FAP, FAR, etc.')
diff --git a/bin/all_sky_search/pycbc_coinc_statmap_inj b/bin/all_sky_search/pycbc_coinc_statmap_inj
index 357ccef068a..0c4d33f9409 100644
--- a/bin/all_sky_search/pycbc_coinc_statmap_inj
+++ b/bin/all_sky_search/pycbc_coinc_statmap_inj
@@ -6,12 +6,15 @@ with producing the combined foreground and background triggers
 """
 import argparse, logging, itertools, copy, pycbc.io, numpy, lal
 from pycbc.events import veto, coinc, significance
+import pycbc.version
 import pycbc.conversions as conv
 from pycbc import init_logging
 
 parser = argparse.ArgumentParser()
 # General required options
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--cluster-window', type=float, default=10,
                     help='Length of time window in seconds to cluster coinc '
                          'events [default=10s]')
diff --git a/bin/all_sky_search/pycbc_combine_coincident_events b/bin/all_sky_search/pycbc_combine_coincident_events
index 55d928bf66e..86d6be76828 100644
--- a/bin/all_sky_search/pycbc_combine_coincident_events
+++ b/bin/all_sky_search/pycbc_combine_coincident_events
@@ -9,6 +9,7 @@ import logging
 
 import pycbc
 from pycbc.io import HFile
+import pycbc.version
 
 def com(f, files, group):
     """ Combine the same column from multiple files into another file f"""
@@ -55,6 +56,7 @@ def com_with_detector_key(f, files, group):
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg)
 parser.add_argument('--statmap-files', nargs='+',
                     help="List of coinc files to be redistributed")
 parser.add_argument('--output-file', help="name of output file")
diff --git a/bin/all_sky_search/pycbc_combine_statmap b/bin/all_sky_search/pycbc_combine_statmap
index 825b164273d..2587e624510 100755
--- a/bin/all_sky_search/pycbc_combine_statmap
+++ b/bin/all_sky_search/pycbc_combine_statmap
@@ -5,10 +5,12 @@ significant foreground, but leaves the background triggers alone.
""" import numpy, argparse, logging, pycbc, pycbc.events, pycbc.io, lal +import pycbc.version from ligo import segments parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-files', nargs='+', help="List of coinc files to be redistributed") parser.add_argument('--cluster-window', type=float) diff --git a/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank b/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank index fafbb03bf12..71aaa7e6ea8 100644 --- a/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank +++ b/bin/all_sky_search/pycbc_cut_merge_triggers_to_tmpltbank @@ -21,15 +21,18 @@ Reduce a MERGE triggers file to a reduced template bank """ import logging +import imp import argparse import numpy import h5py import pycbc +import pycbc.version from pycbc.io import HFile -from pycbc import load_source parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument("--input-file", required=True, help="Input merge triggers HDF file.") parser.add_argument("--output-file", required=True, @@ -50,7 +53,7 @@ pycbc.init_logging(opt.verbose) bank_fd = HFile(opt.full_template_bank, 'r') -modl = load_source('filter_func', opt.filter_func_file) +modl = imp.load_source('filter_func', opt.filter_func_file) func = modl.filter_tmpltbank bool_arr = func(bank_fd) diff --git a/bin/all_sky_search/pycbc_distribute_background_bins b/bin/all_sky_search/pycbc_distribute_background_bins index 039624618b1..66125f8d5ba 100644 --- a/bin/all_sky_search/pycbc_distribute_background_bins +++ b/bin/all_sky_search/pycbc_distribute_background_bins @@ -1,7 +1,9 @@ #!/bin/env python import argparse, numpy, pycbc.events, logging, pycbc.events, pycbc.io +import pycbc.version parser = argparse.ArgumentParser() +parser.add_argument("--version", action=pycbc.version.Version) pycbc.add_common_pycbc_options(parser) parser.add_argument('--coinc-files', nargs='+', help="List of coinc files to be redistributed") diff --git a/bin/all_sky_search/pycbc_exclude_zerolag b/bin/all_sky_search/pycbc_exclude_zerolag index 82dd103d45b..f79cfc55dc9 100644 --- a/bin/all_sky_search/pycbc_exclude_zerolag +++ b/bin/all_sky_search/pycbc_exclude_zerolag @@ -5,11 +5,14 @@ coincidences from *any* coincidence type with ifar above a certain threshold """ import numpy as np, argparse, logging, pycbc, pycbc.io +import pycbc.version from pycbc.events import veto, coinc, significance import pycbc.conversions as conv parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--statmap-file', type=str, help="Coinc statmap file to be recalculated based on foreground removal") parser.add_argument('--other-statmap-files', nargs='+', diff --git a/bin/all_sky_search/pycbc_fit_sngls_binned b/bin/all_sky_search/pycbc_fit_sngls_binned index 126ed493fc3..7e51c4005a4 100644 --- a/bin/all_sky_search/pycbc_fit_sngls_binned +++ b/bin/all_sky_search/pycbc_fit_sngls_binned @@ -22,12 +22,12 @@ from matplotlib import pyplot as plt import copy, numpy as np -import pycbc from pycbc import events, bin_utils, results from pycbc.events import triggers from pycbc.events import trigger_fits as trstats from pycbc.events import stat as pystat from pycbc.io import HFile 
+import pycbc.version
 
 #### MAIN ####
 
@@ -35,6 +35,7 @@ parser = argparse.ArgumentParser(usage="",
     description="Perform maximum-likelihood fits of single inspiral trigger"
                 " distributions to various functions")
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action=pycbc.version.Version)
 parser.add_argument("--trigger-file",
                     help="Input hdf5 file containing single triggers. "
                          "Required")
diff --git a/bin/all_sky_search/pycbc_fit_sngls_by_template b/bin/all_sky_search/pycbc_fit_sngls_by_template
index 5e7d31e87e3..f03aede6019 100755
--- a/bin/all_sky_search/pycbc_fit_sngls_by_template
+++ b/bin/all_sky_search/pycbc_fit_sngls_by_template
@@ -18,11 +18,11 @@ import argparse, logging
 
 import copy, numpy as np
 
-import pycbc
 from pycbc import events, init_logging
 from pycbc.events import triggers, trigger_fits as trstats
 from pycbc.events import stat as statsmod
 from pycbc.types.optparse import MultiDetOptionAction
+import pycbc.version
 from pycbc.io import HFile
 
 #### DEFINITIONS AND FUNCTIONS ####
@@ -61,6 +61,7 @@ parser = argparse.ArgumentParser(usage="",
     description="Perform maximum-likelihood fits of single inspiral trigger"
                 " distributions to various functions")
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action=pycbc.version.Version)
 parser.add_argument("--trigger-file",
                     help="Input hdf5 file containing single triggers. "
                          "Required")
@@ -352,9 +353,6 @@ sigma_regions = trigf[args.ifo + '/sigmasq_template'][:]
 median_sigma = []
 for reg in sigma_regions:
     strigs = trigf[args.ifo + '/sigmasq'][reg]
-    if len(strigs) == 0:
-        median_sigma.append(np.nan)
-        continue
     median_sigma.append(np.median(strigs) ** 0.5)
 
 outfile = HFile(args.output, 'w')
diff --git a/bin/all_sky_search/pycbc_fit_sngls_over_multiparam b/bin/all_sky_search/pycbc_fit_sngls_over_multiparam
index 45b46fa32e3..e9ab33a94c6 100755
--- a/bin/all_sky_search/pycbc_fit_sngls_over_multiparam
+++ b/bin/all_sky_search/pycbc_fit_sngls_over_multiparam
@@ -13,10 +13,8 @@
 # Public License for more details.
 
 
-import sys, argparse, logging, numpy
+import sys, argparse, logging, pycbc.version, numpy
 from scipy.stats import norm
-
-import pycbc
 from pycbc.events import triggers
 from pycbc.io import HFile
 from pycbc import init_logging
@@ -179,6 +177,7 @@ parser = argparse.ArgumentParser(usage="",
                 "background model.")
 
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action=pycbc.version.Version)
 parser.add_argument("--template-fit-file", required=True, nargs='+',
                     help="hdf5 file(s) containing fit coefficients for each "
                          "individual template. Can smooth over multiple "
@@ -342,20 +341,14 @@ if len(args.template_fit_file) > 1:
         nasum = nabove[tidsort].cumsum()
         invsum = invalphan[tidsort].cumsum()
         ntsum = ntotal[tidsort].cumsum()
+        mssum = median_sigma[tidsort].cumsum()
         num = right - left
 
         tid = tid_unique
         nabove = (nasum[right] - nasum[left]) / num
         invalphan = (invsum[right] - invsum[left]) / num
         ntotal = (ntsum[right] - ntsum[left]) / num
-        if median_sigma is not None:
-            # Median sigma is a special one - we need to make sure that
-            # we do not mess things up when nan values are given, so we
-            # can't use the special cumsum fast option
-            median_sigma = [
-                numpy.nanmean(median_sigma[tidsort[l:r]])
-                for l, r in zip(left, right)
-            ]
+        median_sigma = (mssum[right] - mssum[left]) / num
 
 if args.output_fits_by_template:
     # Store fit_by_template values for output file
diff --git a/bin/all_sky_search/pycbc_fit_sngls_over_param b/bin/all_sky_search/pycbc_fit_sngls_over_param
index eb63ec13101..cd6afcc9123 100644
--- a/bin/all_sky_search/pycbc_fit_sngls_over_param
+++ b/bin/all_sky_search/pycbc_fit_sngls_over_param
@@ -18,10 +18,10 @@ import argparse, logging
 
 import numpy as np
 
-import pycbc
 from pycbc import init_logging
 from pycbc.io import HFile
 from pycbc.events import triggers
+import pycbc.version
 
 parser = argparse.ArgumentParser(usage="",
     description="Smooth (regress) the dependence of coefficients describing "
@@ -30,6 +30,7 @@ parser = argparse.ArgumentParser(usage="",
                 "background model.")
 
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action=pycbc.version.Version)
 parser.add_argument("--template-fit-file",
                     help="Input hdf5 file containing fit coefficients for each"
                          " individual template. Required")
diff --git a/bin/all_sky_search/pycbc_followup_file b/bin/all_sky_search/pycbc_followup_file
index a1820ecdaf6..22a7bd06f40 100644
--- a/bin/all_sky_search/pycbc_followup_file
+++ b/bin/all_sky_search/pycbc_followup_file
@@ -7,6 +7,8 @@ from pycbc.io import HFile
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--statmap-file',
     help="Statmap file containing the candidates/background to follow up")
 parser.add_argument('--bank-file',
diff --git a/bin/all_sky_search/pycbc_foreground_censor b/bin/all_sky_search/pycbc_foreground_censor
index 78e20e7d6ae..cd3e7a3ef8b 100755
--- a/bin/all_sky_search/pycbc_foreground_censor
+++ b/bin/all_sky_search/pycbc_foreground_censor
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 """Make segment file to blind the results from foreground related triggers
 """
-import os, argparse, logging
+import os, argparse, logging, pycbc.version
 from urllib.parse import urlunparse
 import pycbc.events
 from pycbc.workflow import SegFile
@@ -9,6 +9,7 @@ from pycbc.io import HFile
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
 parser.add_argument('--foreground-triggers',
     help="HDF file containing the zerolag foreground triggers "
          "from the analysis")
diff --git a/bin/all_sky_search/pycbc_make_bayestar_skymap b/bin/all_sky_search/pycbc_make_bayestar_skymap
index a8ec0b74094..5d937a2818e 100644
--- a/bin/all_sky_search/pycbc_make_bayestar_skymap
+++ b/bin/all_sky_search/pycbc_make_bayestar_skymap
@@ -26,7 +26,7 @@ import tempfile
 
 from ligo.lw import lsctables, utils as ligolw_utils
 
-import pycbc
+import pycbc.version
 from pycbc import init_logging
 from pycbc.waveform import bank as wavebank
 from pycbc.io import WaveformArray
@@ -34,6 +34,8 @@ from pycbc.io.ligolw import LIGOLWContentHandler
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action="version",
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--bayestar-executable',
                     help="The bayestar-localize-coinc executable to be run. "
                          "If not given, will use whatever is available in "
diff --git a/bin/all_sky_search/pycbc_merge_psds b/bin/all_sky_search/pycbc_merge_psds
index dd8bdafde07..d034e2659c3 100755
--- a/bin/all_sky_search/pycbc_merge_psds
+++ b/bin/all_sky_search/pycbc_merge_psds
@@ -18,10 +18,12 @@
 """ Merge hdf psd files """
 
 import logging, argparse, numpy, pycbc.types
+from pycbc.version import git_verbose_msg as version
 from pycbc.io import HFile
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=version)
 parser.add_argument('--psd-files', nargs='+')
 parser.add_argument("--output-file", required=True)
diff --git a/bin/all_sky_search/pycbc_reduce_template_bank b/bin/all_sky_search/pycbc_reduce_template_bank
index c8010edb36f..e9243ade8f1 100644
--- a/bin/all_sky_search/pycbc_reduce_template_bank
+++ b/bin/all_sky_search/pycbc_reduce_template_bank
@@ -23,13 +23,16 @@ Reduce a template bank using some input parameter cuts
 
 import numpy
 import logging
+import imp
 import argparse
 import pycbc
+import pycbc.version
 from pycbc.io import HFile
-from pycbc import load_source
 
 parser = argparse.ArgumentParser(description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action="version",
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument("--input-bank", required=True,
                     help="Input template bank HDF file.")
 parser.add_argument("--output-bank", required=True,
@@ -48,7 +51,7 @@ pycbc.init_logging(opt.verbose)
 
 bank_fd = HFile(opt.input_bank, 'r')
 
-modl = load_source('filter_func', opt.filter_func_file)
+modl = imp.load_source('filter_func', opt.filter_func_file)
 func = modl.filter_tmpltbank
 bool_arr = func(bank_fd)
diff --git a/bin/all_sky_search/pycbc_rerank_passthrough b/bin/all_sky_search/pycbc_rerank_passthrough
index 74c14ea4ac8..60662ad3fa3 100644
--- a/bin/all_sky_search/pycbc_rerank_passthrough
+++ b/bin/all_sky_search/pycbc_rerank_passthrough
@@ -5,6 +5,8 @@ from pycbc.io import HFile
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--output-file',
     help="File containing the newly assigned statistic values")
diff --git a/bin/all_sky_search/pycbc_sngls_findtrigs b/bin/all_sky_search/pycbc_sngls_findtrigs
index 066cec88d5f..897a379b32c 100644
--- a/bin/all_sky_search/pycbc_sngls_findtrigs
+++ b/bin/all_sky_search/pycbc_sngls_findtrigs
@@ -2,10 +2,9 @@ import argparse, logging, h5py, numpy as np
 from ligo.segments import infinity
 from numpy.random import seed, shuffle
-
-import pycbc
 from pycbc.events import veto, coinc, stat
 import pycbc.conversions as conv
+import pycbc.version
 from pycbc import io
 from pycbc.events import cuts, trigger_fits as trfits
 from pycbc.events.veto import indices_outside_times
@@ -14,6 +13,8 @@ from pycbc import init_logging
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action='version',
+                    version=pycbc.version.git_verbose_msg)
 # Basic file input options
 parser.add_argument("--trigger-files", type=str, nargs=1,
                     help="File containing single-detector triggers")
diff --git a/bin/all_sky_search/pycbc_sngls_pastro b/bin/all_sky_search/pycbc_sngls_pastro
index e26a09b0cc7..ccaa252ca67 100644
--- a/bin/all_sky_search/pycbc_sngls_pastro
+++ b/bin/all_sky_search/pycbc_sngls_pastro
@@ -17,6 +17,7 @@ from pycbc import conversions as conv
 from pycbc.events import veto, stat, ranking, coinc, single as sngl
 from pycbc.io.ligolw import LIGOLWContentHandler
 from ligo.segments import segment, segmentlist
+import pycbc.version
 import matplotlib
 matplotlib.use('agg')
 from matplotlib import pyplot as plt
@@ -39,6 +40,8 @@ mchirp_power = {
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument("--single-statmap-files", nargs='+', required=True,
                     help="Single statmap files for which p_astro is "
                          "calculated.")
diff --git a/bin/all_sky_search/pycbc_sngls_statmap b/bin/all_sky_search/pycbc_sngls_statmap
index 074214dbdda..bd462ca767d 100755
--- a/bin/all_sky_search/pycbc_sngls_statmap
+++ b/bin/all_sky_search/pycbc_sngls_statmap
@@ -10,7 +10,7 @@ import logging, numpy, copy
 from pycbc.events import veto, coinc
 from pycbc.events import triggers, trigger_fits as trstats
 from pycbc.events import significance
-import pycbc.pnutils, pycbc.io
+import pycbc.version, pycbc.pnutils, pycbc.io
 import sys
 import pycbc.conversions as conv
 
@@ -35,6 +35,8 @@ class fw(object):
 parser = argparse.ArgumentParser()
 # General required options
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--sngls-files', nargs='+',
                     help='List of files containing trigger and statistic '
                          'information.')
diff --git a/bin/all_sky_search/pycbc_sngls_statmap_inj b/bin/all_sky_search/pycbc_sngls_statmap_inj
index 177cd7e2fd1..4a9517c6171 100644
--- a/bin/all_sky_search/pycbc_sngls_statmap_inj
+++ b/bin/all_sky_search/pycbc_sngls_statmap_inj
@@ -10,7 +10,7 @@ import lal, logging, numpy
 from pycbc.events import veto, coinc
 from pycbc.events import triggers, trigger_fits as trstats
 from pycbc.events import significance
-import pycbc.pnutils, pycbc.io
+import pycbc.version, pycbc.pnutils, pycbc.io
 import sys
 import pycbc.conversions as conv
 
@@ -35,6 +35,8 @@ class fw(object):
 parser = argparse.ArgumentParser()
 # General required options
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 parser.add_argument('--sngls-files', nargs='+',
                     help='List of files containign trigger and statistic '
                          'information.')
diff --git a/bin/all_sky_search/pycbc_strip_injections b/bin/all_sky_search/pycbc_strip_injections
index 027ad6b5ac7..0ee3840ad97 100644
--- a/bin/all_sky_search/pycbc_strip_injections
+++ b/bin/all_sky_search/pycbc_strip_injections
@@ -1,5 +1,5 @@
 #!/bin/env python
-import numpy, argparse, pycbc.pnutils, logging
+import numpy, argparse, pycbc.version, pycbc.pnutils, logging
 from pycbc.events import veto
 from pycbc.io.ligolw import LIGOLWContentHandler
 from ligo.lw import ligolw, table, utils as ligolw_utils
@@ -13,6 +13,7 @@ def remove(l, i):
 
 parser = argparse.ArgumentParser()
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg)
 parser.add_argument('--injection-file')
 parser.add_argument('--veto-file',
     help="File containing segments used to veto injections")
diff --git a/bin/bank/pycbc_aligned_bank_cat b/bin/bank/pycbc_aligned_bank_cat
index ae6c4be9110..e5a2b1441bb 100644
--- a/bin/bank/pycbc_aligned_bank_cat
+++ b/bin/bank/pycbc_aligned_bank_cat
@@ -25,6 +25,7 @@ import logging
 import glob
 import argparse
 import numpy
+import pycbc.version
 from ligo.lw import utils
 from pycbc import tmpltbank
 # Old ligolw output functions no longer imported at package level
@@ -48,6 +49,7 @@ parser = argparse.ArgumentParser(description=__doc__,
            formatter_class=tmpltbank.IndentedHelpFormatterWithNL)
 
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action="version", version=__version__)
 parser.add_argument("-i", "--input-glob",
                     help="file glob the list of paramters")
 parser.add_argument("-I", "--input-files", nargs='+',
diff --git a/bin/bank/pycbc_aligned_stoch_bank b/bin/bank/pycbc_aligned_stoch_bank
index d6d382d28dd..e615fd75415 100644
--- a/bin/bank/pycbc_aligned_stoch_bank
+++ b/bin/bank/pycbc_aligned_stoch_bank
@@ -25,6 +25,7 @@ import numpy
 import logging
 
 import pycbc
+import pycbc.version
 from pycbc import tmpltbank
 # Old ligolw output functions no longer imported at package level
 import pycbc.tmpltbank.bank_output_utils as bank_output
@@ -34,12 +35,17 @@ import pycbc.strain
 from pycbc.pnutils import named_frequency_cutoffs
 
 
+__author__ = "Ian Harry "
+__version__ = pycbc.version.git_verbose_msg
+__program__ = "pycbc_aligned_stoch_bank"
+
 # Read command line option
 _desc = __doc__[1:]
 parser = argparse.ArgumentParser(description=_desc,
            formatter_class=tmpltbank.IndentedHelpFormatterWithNL)
 
 # Begin with code specific options
+parser.add_argument("--version", action="version", version=__version__)
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument("-V", "--vary-fupper", action="store_true", default=False,
                     help="Use a variable upper frequency cutoff in laying "
diff --git a/bin/bank/pycbc_bank_verification b/bin/bank/pycbc_bank_verification
index 97d65b07be6..f4707764533 100644
--- a/bin/bank/pycbc_bank_verification
+++ b/bin/bank/pycbc_bank_verification
@@ -49,6 +49,7 @@ parser = argparse.ArgumentParser(description=__doc__,
            formatter_class=tmpltbank.IndentedHelpFormatterWithNL)
 
 # Begin with code specific options
+parser.add_argument("--version", action="version", version=__version__)
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument("--histogram-output-file", action="store", default=None,
                     help="Output a histogram of fitting factors to the "
diff --git a/bin/bank/pycbc_brute_bank b/bin/bank/pycbc_brute_bank
old mode 100755
new mode 100644
index 61626e48832..58fdea4c5a4
--- a/bin/bank/pycbc_brute_bank
+++ b/bin/bank/pycbc_brute_bank
@@ -259,7 +259,6 @@ class TriangleBank(object):
             ({k: params[k][idx] for k in params} for idx in range(total_num))
         ):
             waveform_cache += [return_wf]
-        pool.close_pool()
         del pool
 
         for hp in waveform_cache:
diff --git a/bin/bank/pycbc_coinc_bank2hdf b/bin/bank/pycbc_coinc_bank2hdf
index b093b00be3b..db35f48d4be 100644
--- a/bin/bank/pycbc_coinc_bank2hdf
+++ b/bin/bank/pycbc_coinc_bank2hdf
@@ -55,6 +55,8 @@ def parse_parameters(parameters):
     return outnames, columns
 
 parser = argparse.ArgumentParser()
+parser.add_argument('--version', action='version',
+                    version=pycbc.version.git_verbose_msg)
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument('--bank-file', required=True,
                     help="The bank file to load. Must end in '.xml[.gz]' "
diff --git a/bin/bank/pycbc_geom_aligned_2dstack b/bin/bank/pycbc_geom_aligned_2dstack
index 547450844b9..e12a46c44f8 100644
--- a/bin/bank/pycbc_geom_aligned_2dstack
+++ b/bin/bank/pycbc_geom_aligned_2dstack
@@ -50,6 +50,7 @@ parser = argparse.ArgumentParser(usage, description=_desc,
            formatter_class=pycbc.tmpltbank.IndentedHelpFormatterWithNL)
 pycbc.add_common_pycbc_options(parser)
 # Code specific options
+parser.add_argument('--version', action='version', version=__version__)
 parser.add_argument("--pn-order", action="store", type=str,\
                     default=None,\
                     help="Determines the PN order to use. Note that if you "+\
diff --git a/bin/bank/pycbc_geom_aligned_bank b/bin/bank/pycbc_geom_aligned_bank
index aed29a79467..2d1f6f8c5cd 100644
--- a/bin/bank/pycbc_geom_aligned_bank
+++ b/bin/bank/pycbc_geom_aligned_bank
@@ -178,6 +178,7 @@ parser = argparse.ArgumentParser(description=_desc,
 
 # Begin with code specific options
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=__version__)
 parser.add_argument("-s", "--stack-distance", action="store", type=float,\
                     default=0.2, help="Minimum metric spacing before we "+\
                                       "stack.")
diff --git a/bin/bank/pycbc_geom_nonspinbank b/bin/bank/pycbc_geom_nonspinbank
index b778f7fafb0..dcf5aa71890 100644
--- a/bin/bank/pycbc_geom_nonspinbank
+++ b/bin/bank/pycbc_geom_nonspinbank
@@ -48,6 +48,7 @@ parser = argparse.ArgumentParser(
            formatter_class=tmpltbank.IndentedHelpFormatterWithNL)
 
 # Begin with code specific options
+parser.add_argument('--version', action='version', version=__version__)
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument("--random-seed", action="store", type=int,
                     default=None,
diff --git a/bin/bank/pycbc_tmpltbank_to_chi_params b/bin/bank/pycbc_tmpltbank_to_chi_params
index b0314589ccb..8f2934d9534 100644
--- a/bin/bank/pycbc_tmpltbank_to_chi_params
+++ b/bin/bank/pycbc_tmpltbank_to_chi_params
@@ -45,6 +45,7 @@ parser = argparse.ArgumentParser(description=__doc__,
            formatter_class=tmpltbank.IndentedHelpFormatterWithNL)
 
 # Begin with code specific options
+parser.add_argument("--version", action="version", version=__version__)
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument("--input-bank", action="store", required=True,
                     help="The template bank to use an input.")
diff --git a/bin/inference/pycbc_inference b/bin/inference/pycbc_inference
index 701663a3550..d0cfd7b953a 100644
--- a/bin/inference/pycbc_inference
+++ b/bin/inference/pycbc_inference
@@ -31,6 +31,7 @@ from pycbc import (distributions, transforms, fft,
                    opt, scheme, pool)
 from pycbc.waveform import generator
+from pycbc import __version__
 from pycbc import inference
 from pycbc.inference import (models, burn_in, option_utils)
 from pycbc.inference.io import loadfile
@@ -40,6 +41,8 @@ from pycbc.workflow import configuration
 parser = argparse.ArgumentParser(usage=__file__ + " [--options]",
                                  description=__doc__)
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument("--version", action="version", version=__version__,
+                    help="Prints version information.")
 # output options
 parser.add_argument("--output-file", type=str, required=True,
                     help="Output file path.")
diff --git a/bin/inference/pycbc_inference_plot_acceptance_rate b/bin/inference/pycbc_inference_plot_acceptance_rate
index 2fb563c27ae..fe0b81555b4 100644
--- a/bin/inference/pycbc_inference_plot_acceptance_rate
+++ b/bin/inference/pycbc_inference_plot_acceptance_rate
@@ -23,7 +23,9 @@ use('agg')
 import matplotlib.pyplot as plt
 import numpy
 import pycbc
+import pycbc.version
 from pycbc import results
+from pycbc import __version__
 from pycbc.inference import io
 import sys
 
@@ -35,6 +37,8 @@ parser = argparse.ArgumentParser(
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument("--input-file", type=str, required=True,
                     help="Path to input HDF file.")
+parser.add_argument('--version', action='version', version=__version__,
+                    help='show version number and exit')
 # output plot options
 parser.add_argument("--output-file", type=str, required=True,
                     help="Path to output plot.")
diff --git a/bin/inference/pycbc_inference_plot_acf b/bin/inference/pycbc_inference_plot_acf
index 3d6949e609b..41ab706395a 100644
--- a/bin/inference/pycbc_inference_plot_acf
+++ b/bin/inference/pycbc_inference_plot_acf
@@ -29,6 +29,7 @@ import pycbc
 from pycbc import results
 from pycbc.inference import io
+from pycbc import __version__
 from pycbc.inference import option_utils
 from pycbc.inference.sampler import samplers
 
@@ -38,6 +39,8 @@ parser = io.ResultsArgumentParser(skip_args='thin-interval',
     description="Plots autocorrelation function "
                 "from inference samples.")
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=__version__,
+                    help='show version number and exit')
 # output plot options
 parser.add_argument("--output-file", type=str, required=True,
                     help="Path to output plot.")
diff --git a/bin/inference/pycbc_inference_plot_acl b/bin/inference/pycbc_inference_plot_acl
index b46cb18c930..860a9810713 100644
--- a/bin/inference/pycbc_inference_plot_acl
+++ b/bin/inference/pycbc_inference_plot_acl
@@ -30,6 +30,7 @@ import pycbc
 from pycbc import results
 from pycbc.filter import autocorrelation
+from pycbc import __version__
 from pycbc.inference import io
 
 # command line usage
@@ -39,6 +40,8 @@ parser = io.ResultsArgumentParser(skip_args=['thin-interval', 'temps'],
                 "length per walker from an MCMC "
                 "sampler.")
 pycbc.add_common_pycbc_options(parser)
+parser.add_argument('--version', action='version', version=__version__,
+                    help='show version number and exit')
 # output plot options
 parser.add_argument("--output-file", type=str, required=True,
                     help="Path to output plot.")
diff --git a/bin/inference/pycbc_inference_plot_dynesty_run b/bin/inference/pycbc_inference_plot_dynesty_run
index 3e1aa6795b2..6e2c8d03afb 100644
--- a/bin/inference/pycbc_inference_plot_dynesty_run
+++ b/bin/inference/pycbc_inference_plot_dynesty_run
@@ -26,6 +26,7 @@ from dynesty import plotting as dyplot
 
 from pycbc.inference import io
 import pycbc
+from pycbc import __version__
 from pycbc import results
 import sys
 
@@ -36,6 +37,8 @@ parser = argparse.ArgumentParser(
 pycbc.add_common_pycbc_options(parser)
 parser.add_argument("--input-file", type=str, required=True,
                     help="Path to input HDF file.")
+parser.add_argument('--version', action='version', version=__version__,
+                    help='show version number and exit')
 # output plot options
 parser.add_argument("--output-file", type=str, required=True,
                     help="Path to output plot.")
diff --git a/bin/inference/pycbc_inference_plot_dynesty_traceplot b/bin/inference/pycbc_inference_plot_dynesty_traceplot
index 2d6c9a968b6..c7e352f91b7 100644
--- a/bin/inference/pycbc_inference_plot_dynesty_traceplot
+++ b/bin/inference/pycbc_inference_plot_dynesty_traceplot
@@ -26,6 +26,7 @@ from dynesty import plotting as dyplot
 
 from pycbc.inference import io
 import pycbc
+from pycbc import __version__
 from pycbc import results
 import sys
 
@@ -38,6 +39,8 @@ parser = argparse.ArgumentParser(
 pycbc.add_common_pycbc_options(parser)
parser.add_argument("--input-file", type=str, required=True, help="Path to input HDF file.") +parser.add_argument('--version', action='version', version=__version__, + help='show version number and exit') # output plot options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_gelman_rubin b/bin/inference/pycbc_inference_plot_gelman_rubin index 2af9fd1fc76..9f3be01d50d 100644 --- a/bin/inference/pycbc_inference_plot_gelman_rubin +++ b/bin/inference/pycbc_inference_plot_gelman_rubin @@ -25,7 +25,7 @@ import matplotlib.pyplot as plt import sys from pycbc import ( - results, init_logging, add_common_pycbc_options + __version__, results, init_logging, add_common_pycbc_options ) from pycbc.inference import (gelman_rubin, io, option_utils) @@ -33,6 +33,8 @@ from pycbc.inference import (gelman_rubin, io, option_utils) parser = io.ResultsArgumentParser(skip_args=['walkers']) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=__version__, + help='show version number and exit') # output options parser.add_argument("--output-file", type=str, required=True, diff --git a/bin/inference/pycbc_inference_plot_geweke b/bin/inference/pycbc_inference_plot_geweke index 650fe4254bc..755b2f3268a 100644 --- a/bin/inference/pycbc_inference_plot_geweke +++ b/bin/inference/pycbc_inference_plot_geweke @@ -26,6 +26,7 @@ import pycbc from pycbc import results import sys +from pycbc import __version__ from pycbc.inference import (io, geweke, option_utils) # add options to command line @@ -35,6 +36,9 @@ pycbc.add_common_pycbc_options(parser) # program-specific +parser.add_argument('--version', action='version', version=__version__, + help='show version number and exit') + # output options parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") diff --git a/bin/inference/pycbc_inference_plot_inj_recovery b/bin/inference/pycbc_inference_plot_inj_recovery index 7c9b0f048c9..fc51655bdd8 100644 --- a/bin/inference/pycbc_inference_plot_inj_recovery +++ b/bin/inference/pycbc_inference_plot_inj_recovery @@ -11,8 +11,10 @@ import matplotlib.colorbar as cbar import matplotlib.pyplot as plt import numpy import pycbc +import pycbc.version from matplotlib import cm from pycbc import inject +from pycbc import __version__ from pycbc.inference import (option_utils, io) from pycbc.results import save_fig_with_metadata @@ -20,6 +22,8 @@ from pycbc.results import save_fig_with_metadata parser = io.ResultsArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__, + help="Prints version information.") parser.add_argument("--output-file", required=True, type=str, help="Path to save output plot.") parser.add_argument("--percentiles", nargs=2, type=float, default=[5, 95], diff --git a/bin/inference/pycbc_inference_plot_movie b/bin/inference/pycbc_inference_plot_movie index 7db38b92016..c25b0d6ed75 100644 --- a/bin/inference/pycbc_inference_plot_movie +++ b/bin/inference/pycbc_inference_plot_movie @@ -53,6 +53,7 @@ from matplotlib import pyplot import pycbc.results from pycbc import conversions +from pycbc import __version__ from pycbc.inference import (option_utils, io) from pycbc.results.scatter_histograms import (create_multidim_plot, @@ -110,6 +111,8 @@ def integer_logspace(start, end, num): skip_args = ['thin-start', 'thin-interval', 'thin-end', 'iteration'] parser = 
io.ResultsArgumentParser(description=__doc__, skip_args=skip_args) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__, + help="show version number and exit") # make frame number and frame step mutually exclusive group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--frame-number", type=int, diff --git a/bin/inference/pycbc_inference_plot_posterior b/bin/inference/pycbc_inference_plot_posterior index 66890ced876..45357266ad3 100644 --- a/bin/inference/pycbc_inference_plot_posterior +++ b/bin/inference/pycbc_inference_plot_posterior @@ -36,9 +36,11 @@ import matplotlib from matplotlib import (patches, use) import pycbc +import pycbc.version from pycbc.results.plot import (add_style_opt_to_parser, set_style_from_cli) from pycbc.results import metadata from pycbc.io import FieldArray +from pycbc import __version__ from pycbc import conversions from pycbc.workflow import WorkflowConfigParser from pycbc.inference import (option_utils, io) @@ -52,6 +54,8 @@ use('agg') parser = io.ResultsArgumentParser() pycbc.add_common_pycbc_options(parser) # program-specific +parser.add_argument("--version", action="version", version=__version__, + help="Prints version information.") parser.add_argument("--output-file", type=str, required=True, help="Output plot path.") parser.add_argument("--plot-prior", nargs="+", type=str, diff --git a/bin/inference/pycbc_inference_plot_pp b/bin/inference/pycbc_inference_plot_pp index d23268b6f49..5e6490499c8 100644 --- a/bin/inference/pycbc_inference_plot_pp +++ b/bin/inference/pycbc_inference_plot_pp @@ -38,9 +38,13 @@ import pycbc.results.plot from pycbc.results import save_fig_with_metadata from pycbc.inference import (option_utils, io) +from pycbc import __version__ + # parse command line parser = io.ResultsArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=__version__, + help='show version number and exit') parser.add_argument("--output-file", required=True, type=str, help="Path to save output plot.") parser.add_argument("--injection-hdf-group", default="injections", diff --git a/bin/inference/pycbc_inference_plot_prior b/bin/inference/pycbc_inference_plot_prior index c656dac3955..436d0c6f0e6 100644 --- a/bin/inference/pycbc_inference_plot_prior +++ b/bin/inference/pycbc_inference_plot_prior @@ -28,6 +28,7 @@ use('agg') from matplotlib import pyplot as plt import pycbc +from pycbc import __version__ from pycbc import (distributions, results, waveform) from pycbc.inference.option_utils import ParseParametersArg from pycbc.distributions.utils import prior_from_config @@ -75,6 +76,8 @@ parser.add_argument("--nsamples", type=int, default=10000, "plotting. 
Default is 10000.") parser.add_argument("--output-file", type=str, required=True, help="Path to output plot.") +parser.add_argument("--version", action="version", version=__version__, + help="show version number and exit") # parse the command line opts = parser.parse_args() diff --git a/bin/inference/pycbc_inference_plot_samples b/bin/inference/pycbc_inference_plot_samples index f79996dd82d..651c79e5178 100644 --- a/bin/inference/pycbc_inference_plot_samples +++ b/bin/inference/pycbc_inference_plot_samples @@ -27,6 +27,7 @@ from matplotlib import rc import numpy import pycbc from pycbc import results +from pycbc import __version__ from pycbc.inference import (option_utils, io) import sys @@ -34,6 +35,8 @@ import sys parser = argparse.parser = io.ResultsArgumentParser( skip_args=['chains', 'iteration']) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__, + help="show version number and exit") parser.add_argument("--chains", nargs='+', default=None, help="Chain/walker indices to plot. Options are 'all' or " "one or more chain indices. Default is to plot the " diff --git a/bin/live/pycbc_live_plot_single_significance_fits b/bin/live/pycbc_live_plot_single_significance_fits index 3dc2414a297..5010066856a 100644 --- a/bin/live/pycbc_live_plot_single_significance_fits +++ b/bin/live/pycbc_live_plot_single_significance_fits @@ -131,10 +131,10 @@ for ifo in all_ifos: continue # Keep track of some maxima for use in setting the plot limits - maxstat = np.nanmax(stats[ifo]) + maxstat = stats[ifo].max() max_rate = 0 - statrange = maxstat - max(np.nanmin(stats[ifo]), fit_threshold[ifo]) + statrange = maxstat - max(stats[ifo].min(), fit_threshold[ifo]) plotmax = maxstat + statrange * 0.05 plotbins = np.linspace(fit_threshold[ifo], plotmax, 400) diff --git a/bin/live/pycbc_live_single_significance_fits b/bin/live/pycbc_live_single_significance_fits index b1e76c32612..d122d6b2787 100644 --- a/bin/live/pycbc_live_single_significance_fits +++ b/bin/live/pycbc_live_single_significance_fits @@ -98,6 +98,13 @@ args.trigger_cuts = args.trigger_cuts or [] args.trigger_cuts.append(f"end_time:{args.gps_start_time}:lower_inc") args.trigger_cuts.append(f"end_time:{args.gps_end_time}:upper_inc") +# Efficiency saving: add SNR cut before any others as sngl_ranking can +# only be less than SNR. +args.trigger_cuts.insert(0, f"snr:{args.fit_threshold}:lower_inc") + +# Cut triggers with sngl-ranking below threshold +args.trigger_cuts.append(f"{args.sngl_ranking}:{args.fit_threshold}:lower_inc") + trigger_cut_dict, template_cut_dict = cuts.ingest_cuts_option_group(args) logging.info("Setting up duration bins") diff --git a/bin/live/pycbc_live_supervise_collated_trigger_fits b/bin/live/pycbc_live_supervise_collated_trigger_fits deleted file mode 100755 index 087ae5d1339..00000000000 --- a/bin/live/pycbc_live_supervise_collated_trigger_fits +++ /dev/null @@ -1,650 +0,0 @@ -#!/usr/bin/env python - -"""Supervise the periodic re-fitting of PyCBC Live single-detector triggers, -and the associated plots. 
-""" - -import re -import logging -import argparse -from datetime import datetime, timedelta -from dateutil.relativedelta import relativedelta -import os -import shutil -import subprocess -import numpy as np - -from lal import gpstime - -import pycbc -from pycbc.io.hdf import HFile -from pycbc.live import supervision as sv -from pycbc.types.config import InterpolatingConfigParser as icp - -def read_options(args): - """ - read the options into a dictionary - """ - logging.info("Reading config file") - cp = icp(configFiles=[args.config_file]) - config_opts = { - section: {k: v for k, v in cp[section].items()} - for section in cp.sections() - } - del config_opts['environment'] - return config_opts - -def trigger_collation( - day_dt, - day_str, - collation_control_options, - collation_options, - output_dir, - controls - ): - """ - Perform the trigger collation as specified - """ - logging.info("Performing trigger collation") - collate_args = [ - 'pycbc_live_collate_triggers', - ] - collate_args += sv.dict_to_args(collation_options) - gps_start = gpstime.utc_to_gps(day_dt).gpsSeconds - gps_end = gpstime.utc_to_gps(day_dt + timedelta(days=1)).gpsSeconds - - trig_merge_file = os.path.join( - output_dir, - collation_control_options['collated-triggers-format'].format( - ifos=''.join(controls['ifos'].split()), - start=gps_start, - duration=(gps_end - gps_start) - ) - ) - collate_args += [ - '--gps-start-time', f'{gps_start:d}', - '--gps-end-time', f'{gps_end:d}', - '--output-file', trig_merge_file, - ] - - sv.run_and_error(collate_args, controls) - - return trig_merge_file - - -def fit_by_template( - trigger_merge_file, - day_str, - fbt_control_options, - fbt_options, - output_dir, - ifo, - controls - ): - """ - Supervise the running of pycbc_fit_sngls_by_template on live triggers - """ - logging.info("Performing daily fit_by_template") - fbt_out_fname = fbt_control_options['fit-by-template-format'].format( - date=day_str, - ifo=ifo, - ) - fbt_out_full = os.path.join(output_dir, fbt_out_fname) - fit_by_args = ['pycbc_fit_sngls_by_template'] - fit_by_args += ['--trigger-file', trigger_merge_file] - fit_by_args += sv.dict_to_args(fbt_options) - fit_by_args += ['--output', fbt_out_full, '--ifo', ifo] - sv.run_and_error(fit_by_args, controls) - - return fbt_out_full, day_str - - -def find_daily_fit_files( - combined_control_options, - daily_fname_format, - daily_files_dir, - ifo=None - ): - """ - Find files which match the specified formats - """ - log_str = f"Finding files in {daily_files_dir} with format {daily_fname_format}" - if ifo is not None: - log_str += f"in detector {ifo}" - logging.info(log_str) - combined_days = int(combined_control_options['combined-days']) - if 'replay-start-time' in combined_control_options: - replay_start_time = int(combined_control_options['replay-start-time']) - true_start_time = int(combined_control_options['true-start-time']) - replay_duration = int(combined_control_options['replay-duration']) - rep_start_utc = lal.GPSToUTC(replay_start_time)[0:6] - - dt_replay_start = datetime( - year=rep_start_utc[0], - month=rep_start_utc[1], - day=rep_start_utc[2], - hour=rep_start_utc[3], - minute=rep_start_utc[4], - second=rep_start_utc[5] - ) - - td = (day_dt - dt_replay_start).total_seconds() - - # Time since the start of this replay - time_since_replay = np.remainder(td, replay_duration) - - # Add this on to the original start time to get the current time of - # the replay data - true_utc = lal.GPSToUTC(true_start_time)[0:6] - dt_true_start = datetime( - year=true_utc[0], - 
month=true_utc[1], - day=true_utc[2], - hour=true_utc[3], - minute=true_utc[4], - second=true_utc[5] - ) - # Original time of the data being replayed right now - current_date = dt_true_start + timedelta(seconds=time_since_replay) - else: - current_date = day_dt - - date_test = current_date + timedelta(days=1) - - daily_files = [] - missed_files = 0 - # Maximum consecutive number of days between files before a warning is raised - # 10 days of the detector being off would be unusual for current detectors - max_nmissed = int(combined_control_options.get('maximum-missed-files', 10)) - found_files = 0 - while found_files < combined_days and missed_files < max_nmissed: - # Loop through the possible file locations and see if the file exists - date_test -= timedelta(days=1) - date_out = date_test.strftime("%Y_%m_%d") - daily_fname = daily_fname_format.format( - date=date_out, - ifo=ifo, - ) - - output_dir = os.path.join( - daily_files_dir, - date_out - ) - daily_full = os.path.join( - output_dir, - daily_fname - ) - # Check that the file exists: - if not os.path.exists(daily_full): - missed_files += 1 - logging.info("File %s does not exist - skipping", daily_full) - continue - if not len(daily_files): - end_date = date_out - # This is now the oldest file - first_date = date_out - # reset the "missed files" counter, and add to the "found files" - missed_files = 0 - found_files += 1 - daily_files.append(daily_full) - - if found_files == 0: - raise RuntimeError("No files found") - - if missed_files == max_nmissed: - # If more than a set maximum days between files, something - # is wrong with the analysis. Warn about this and use fewer - # files - logging.warning( - f'More than {max_nmissed} days between files, only using ' - f'{found_files} files!' - ) - - return daily_files, first_date, end_date - - -def fit_over_multiparam( - fit_over_controls, - fit_over_options, - ifo, - day_str, - output_dir, - controls - ): - """ - Supervise the smoothing of live trigger fits using - pycbc_fit_sngls_over_multiparam - """ - daily_files, first_date, end_date = find_daily_fit_files( - fit_over_controls, - fit_over_controls['fit-by-format'], - controls['output-directory'], - ifo=ifo - ) - logging.info( - "Smoothing fits using fit_over_multiparam with %d files and " - "specified parameters", - len(daily_files) - ) - file_id_str = f'{first_date}-{end_date}' - out_fname = fit_over_controls['fit-over-format'].format( - dates=file_id_str, - ifo=ifo, - ) - - fit_over_args = ['pycbc_fit_sngls_over_multiparam', '--template-fit-file'] - fit_over_args += daily_files - fit_over_args += sv.dict_to_args(fit_over_options) - fit_over_full = os.path.join(output_dir, out_fname) - fit_over_args += ['--output', fit_over_full] - sv.run_and_error(fit_over_args, controls) - if 'variable-fit-over-param' in fit_over_controls: - variable_fits = fit_over_controls['variable-fit-over-param'].format( - ifo=ifo - ) - sv.symlink(fit_over_full, variable_fits) - - return fit_over_full, file_id_str - -def plot_fits( - fits_file, - ifo, - day_title_str, - plot_fit_options, - controls, - smoothed=False - ): - """Plotting for fit_by files, and linking to the public directory""" - fits_plot_output = fits_file[:-3] + 'png' - logging.info( - "Plotting template fits %s to %s", - fits_file, - fits_plot_output - ) - fits_plot_arguments = [ - 'pycbc_plot_bank_corner', - '--fits-file', - fits_file, - '--output-plot-file',
fits_plot_output, - ] - fits_plot_arguments += sv.dict_to_args(plot_fit_options) - - title = "Fit parameters for pycbc-live, triggers from {}, {}".format( - ifo, - day_title_str - ) - if smoothed == True: - title += ', smoothed' - fits_plot_arguments += ['--title', title] - sv.run_and_error(fits_plot_arguments, controls) - if 'public-dir' in controls: - public_dir = os.path.abspath(os.path.join( - controls['public-dir'], - *day_str.split('_') - )) - sv.symlink(fits_plot_output, public_dir) - - -def single_significance_fits( - daily_controls, - daily_options, - output_dir, - day_str, - day_dt, - controls, - stat_files=None, - ): - """ - Supervise the significance fits for live triggers using - pycbc_live_single_significance_fits - """ - daily_options['output'] = os.path.join( - output_dir, - daily_controls['sig-daily-format'].format( - ifos=''.join(sorted(controls['ifos'].split())), - date=day_str - ), - ) - daily_args = ['pycbc_live_single_significance_fits'] - - gps_start_time = gpstime.utc_to_gps(day_dt).gpsSeconds - gps_end_time = gpstime.utc_to_gps(day_dt + timedelta(days=1)).gpsSeconds - daily_options['gps-start-time'] = f'{gps_start_time:d}' - daily_options['gps-end-time'] = f'{gps_end_time:d}' - daily_args += sv.dict_to_args(daily_options) - if stat_files is not None: - daily_args += ['--statistic-files'] + stat_files - - sv.run_and_error(daily_args, controls) - - return daily_options['output'] - -def plot_single_significance_fits(daily_output, daily_plot_options, controls): - """ - Plotting daily significance fits, and link to public directory if wanted - """ - daily_plot_output = daily_output[:-4].replace( - ''.join(sorted(controls['ifos'].split())), - '{ifo}' - ) + '.png' - logging.info( - "Plotting daily significance fits from %s to %s", - daily_output, - daily_plot_output - ) - daily_plot_arguments = [ - 'pycbc_live_plot_single_significance_fits', - '--trigger-fits-file', - daily_output, - '--output-plot-name-format', - daily_plot_output, - ] - daily_plot_arguments += sv.dict_to_args(daily_plot_options) - sv.run_and_error(daily_plot_arguments, controls) - - # Link the plots to the public-dir if wanted - if 'public-dir' in controls: - daily_plot_outputs = [ - daily_plot_output.format(ifo=ifo) - for ifo in controls['ifos'].split() - ] - logging.info("Linking daily fits plots") - for dpo in daily_plot_outputs: - public_dir = os.path.abspath(os.path.join( - controls['public-dir'], - *day_str.split('_') - )) - - sv.symlink(dpo, public_dir) - - -def combine_significance_fits( - combined_options, - combined_controls, - output_dir, - day_str, - controls - ): - """ - Supervise the smoothing of live trigger significance fits using - pycbc_live_combine_single_significance_fits - """ - # This has a trick to do partial formatting, get the IFOs into the - # string, but not the date - daily_files, first_date, end_date = find_daily_fit_files( - combined_controls, - combined_controls['daily-format'].format( - ifos=''.join(sorted(controls['ifos'].split())), - date='{date}' - ), - controls['output-directory'], - ) - logging.info( - "Smoothing significance fits over %d files", - len(daily_files) - ) - date_range = f'{first_date}-{end_date}' - outfile_name = combined_controls['outfile-format'].format( - date=day_str, - date_range=date_range, - ) - combined_options['output'] = os.path.join(output_dir, outfile_name) - combined_options['trfits-files'] = ' '.join(daily_files) - - combined_args = ['pycbc_live_combine_single_significance_fits'] - combined_args += sv.dict_to_args(combined_options) - - 
sv.run_and_error(combined_args, controls) - - if 'variable-significance-fits' in combined_controls: - logging.info("Linking to variable significance fits file") - sv.symlink( - combined_options['output'], - combined_controls['variable-significance-fits'] - ) - - return combined_options['output'], date_range - -def plot_combined_significance_fits( - csf_file, - date_range, - output_dir, - combined_plot_options, - combined_plot_control_options, - controls - ): - """ - Plotting combined significance fits, and link to public directory if wanted - """ - - oput_fmt = combined_plot_control_options['output-plot-name-format'] - if not '{date_range}' in oput_fmt: - raise RuntimeError( - "Must specify {date_range} in output-plot-name-format" - ) - oput_fmt = oput_fmt.replace('{date_range}', date_range) - oput_full = os.path.join(output_dir, oput_fmt) - combined_plot_arguments = [ - 'pycbc_live_plot_combined_single_significance_fits', - '--combined-fits-file', - csf_file, - '--output-plot-name-format', - oput_full - ] - combined_plot_arguments += sv.dict_to_args(combined_plot_options) - - sv.run_and_error(combined_plot_arguments, controls) - - # Get the list of combined plotting output files: - combined_plot_outputs = [ - oput_full.format(ifo=ifo, type='fit_coeffs') for ifo in - controls['ifos'].split() - ] - combined_plot_outputs += [ - oput_full.format(ifo=ifo, type='counts') for ifo in - controls['ifos'].split() - ] - - if 'public-dir' in controls: - logging.info("Linking combined fits to public dir") - public_dir = os.path.abspath(os.path.join( - controls['public-dir'], - *day_str.split('_') - )) - for cpo in combined_plot_outputs: - sv.symlink(cpo, public_dir) - -def supervise_collation_fits_dq(args, day_dt, day_str): - """ - Perform the trigger collation and fits etc. 
as specified - """ - # Read in the config file and pack into appropriate dictionaries - config_opts = read_options(args) - controls = config_opts['control'] - collation_options = config_opts['collation'] - collation_control_options = config_opts['collation_control'] - fit_by_template_options = config_opts['fit_by_template'] - fit_by_template_control_options = config_opts['fit_by_template_control'] - fit_over_options = config_opts['fit_over_multiparam'] - fit_over_control_options = config_opts['fit_over_multiparam_control'] - plot_fit_options = config_opts['plot_fit'] - daily_options = config_opts['significance_daily_fits'] - daily_control_options = config_opts['significance_daily_fits_control'] - daily_plot_options = config_opts['plot_significance_daily'] - combined_options = config_opts['significance_combined_fits'] - combined_control_options = config_opts['significance_combined_fits_control'] - combined_plot_options = config_opts['plot_significance_combined'] - combined_plot_control_options = config_opts['plot_significance_combined_control'] - - # The main output directory will have a date subdirectory which we - # put the output into - sv.ensure_directories(controls, day_str) - - ifos = controls['ifos'].split() - output_dir = os.path.join( - controls['output-directory'], - day_str - ) - logging.info("Outputs to %s", output_dir) - if 'public-dir' in controls: - public_dir = os.path.abspath(os.path.join( - controls['public-dir'], - *day_str.split('_') - )) - logging.info("Outputs to be linked to %s", public_dir) - - merged_triggers = trigger_collation( - day_dt, - day_str, - collation_control_options, - collation_options, - output_dir, - controls - ) - # Store the locations of files needed for the statistic - stat_files = [] - for ifo in config_opts['control']['ifos'].split(): - if args.fit_by_template: - fbt_file, date_str = fit_by_template( - merged_triggers, - day_str, - fit_by_template_control_options, - fit_by_template_options, - output_dir, - ifo, - controls, - ) - plot_fits( - fbt_file, - ifo, - date_str, - plot_fit_options, - controls - ) - - if args.fit_over_multiparam: - fom_file, date_str = fit_over_multiparam( - fit_over_control_options, - fit_over_options, - ifo, - day_str, - output_dir, - controls - ) - stat_files.append(fom_file) - plot_fits( - fom_file, - ifo, - date_str, - plot_fit_options, - controls, - smoothed=True, - ) - - if args.single_significance_fits: - ssf_file = single_significance_fits( - daily_control_options, - daily_options, - output_dir, - day_str, - day_dt, - controls, - stat_files=stat_files, - ) - plot_single_significance_fits( - ssf_file, - daily_plot_options, - controls - ) - if args.combine_significance_fits: - csf_file, date_str = combine_significance_fits( - combined_options, - combined_control_options, - output_dir, - day_str, - controls - ) - plot_combined_significance_fits( - csf_file, - date_str, - output_dir, - combined_plot_options, - combined_plot_control_options, - controls - ) - - -def get_yesterday_date(): - """ Get the date string for yesterday's triggers """ - day_dt = datetime.utcnow() - timedelta(days=1) - day_dt = datetime.combine(day_dt, datetime.min.time()) - day_str = day_dt.strftime('%Y_%m_%d') - return day_dt, day_str - -parser = argparse.ArgumentParser(description=__doc__) -pycbc.add_common_pycbc_options(parser) -parser.add_argument( - '--config-file', - required=True -) -parser.add_argument( - '--date', - help='Date to analyse, if not given, will analyse yesterday (UTC). ' - 'Format YYYY_MM_DD.
Do not use if using --run-daily-at.' -) -parser.add_argument( - '--fit-by-template', - action='store_true', - help="Perform template fits calculation." -) -parser.add_argument( - '--fit-over-multiparam', - action='store_true', - help="Perform template fits smoothing." -) -parser.add_argument( - '--single-significance-fits', - action='store_true', - help="Perform daily singles significance fits." -) -parser.add_argument( - '--combine-significance-fits', - action='store_true', - help="Do combination of singles significance fits." -) -parser.add_argument( - '--run-daily-at', - metavar='HH:MM:SS', - help='Stay running and repeat the fitting daily at the given UTC hour.' -) -args = parser.parse_args() - -pycbc.init_logging(args.verbose, default_level=1) - -if args.run_daily_at is not None and args.date is not None: - parser.error('Cannot take --run-daily-at and --date at the same time') - -if args.run_daily_at is not None: - # keep running and repeat the fitting every day at the given hour - if not re.match('[0-9][0-9]:[0-9][0-9]:[0-9][0-9]', args.run_daily_at): - parser.error('--run-daily-at takes a UTC time in the format HH:MM:SS') - logging.info('Starting in daily run mode') - while True: - sv.wait_for_utc_time(args.run_daily_at) - day_dt, day_str = get_yesterday_date() - logging.info('==== Time to update the single fits, waking up ====') - supervise_collation_fits_dq(args, day_dt, day_str) -else: - # run just once - if args.date: - day_str = args.date - day_dt = datetime.strptime(args.date, '%Y_%m_%d') - else: - day_dt, day_str = get_yesterday_date() - supervise_collation_fits_dq(args, day_dt, day_str) diff --git a/bin/live/pycbc_live_supervise_single_significance_fits b/bin/live/pycbc_live_supervise_single_significance_fits new file mode 100755 index 00000000000..864a458037f --- /dev/null +++ b/bin/live/pycbc_live_supervise_single_significance_fits @@ -0,0 +1,517 @@ +#!/usr/bin/env python + +"""Supervise the periodic re-fitting of PyCBC Live single-detector triggers, +and the associated plots. +""" + +import re +import logging +import argparse +from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta +import time +import copy +import os +import shutil +import subprocess +import numpy as np + +from lal import gpstime + +import pycbc +from pycbc.io.hdf import HFile + + +def symlink(target, link_name): + """Create a symbolic link replacing the destination and checking for + errors. 
+ """ + cp = subprocess.run([ + 'ln', '-sf', target, link_name + ]) + if cp.returncode: + raise subprocess.SubprocessError( + f"Could not link plot {target} to {link_name}" + ) + + +def dict_to_args(opts_dict): + """ + Convert an option dictionary into a list to be used by subprocess.run + """ + dargs = [] + for option in opts_dict.keys(): + dargs.append('--' + option.strip()) + value = opts_dict[option] + if len(value.split()) > 1: + # value is a list, append individually + for v in value.split(): + dargs.append(v) + elif not value: + # option is a flag, do nothing + continue + else: + # Single value option - easy enough + dargs.append(value) + return dargs + + +def mail_volunteers_error(controls, mail_body_lines, subject): + """ + Email a list of people, defined by mail-volunteers-file + To be used for errors or unusual occurences + """ + if 'mail_volunteers_file' not in controls: + logging.info( + "No file containing people to email, logging the error instead" + ) + for line in mail_body_lines: + logging.warning(line) + return + with open(controls['mail_volunteers_file'], 'r') as mail_volunteers_file: + volunteers = [volunteer.strip() for volunteer in + mail_volunteers_file.readlines()] + logging.info("Emailing %s with warnings", ' '.join(volunteers)) + mail_command = [ + 'mail', + '-s', + subject + ] + mail_command += volunteers + mail_body = '\n'.join(mail_body_lines) + subprocess.run(mail_command, input=mail_body, text=True) + + +def check_trigger_files(filenames, test_options, controls): + """ + Check that the fit coefficients meet criteria set + """ + coeff_upper_limit = float(test_options['upper-limit-coefficient']) + coeff_lower_limit = float(test_options['lower-limit-coefficient']) + warnings = [] + warning_files = [] + for filename in filenames: + warnings_thisfile = [] + with HFile(filename, 'r') as trff: + ifos = [k for k in trff.keys() if not k.startswith('bins')] + fit_coeffs = {ifo: trff[ifo]['fit_coeff'][:] for ifo in ifos} + bins_upper = trff['bins_upper'][:] + bins_lower = trff['bins_lower'][:] + # Which bins have at least *some* triggers within the limit + use_bins = bins_lower > float(test_options['duration-bin-lower-limit']) + for ifo in ifos: + coeffs_above = fit_coeffs[ifo][use_bins] > coeff_upper_limit + coeffs_below = fit_coeffs[ifo][use_bins] < coeff_lower_limit + if not any(coeffs_above) and not any(coeffs_below): + continue + # Problem - the fit coefficient is outside the limits + for bl, bu, fc in zip(bins_lower[use_bins], bins_upper[use_bins], + fit_coeffs[ifo][use_bins]): + if fc < coeff_lower_limit or fc > coeff_upper_limit: + warnings_thisfile.append( + f"WARNING - {ifo} fit coefficient {fc:.3f} in bin " + f"{bl}-{bu} outwith limits " + f"{coeff_lower_limit}-{coeff_upper_limit}" + ) + if warnings_thisfile: + warning_files.append(filename) + warnings.append(warnings_thisfile) + + if warnings: + # Some coefficients are outside the range + # Add the fact that this check failed in the logs + logging.warning("Extreme daily fits values found:") + mail_body_lines = ["Extreme daily fits values found:"] + for filename, filewarnings in zip(warning_files, warnings): + logging.warning(filename) + mail_body_lines.append(f"Values in {filename}") + for fw in filewarnings: + logging.warning(" " + fw) + mail_body_lines.append(" " + fw) + mail_volunteers_error( + controls, + mail_body_lines, + 'PyCBC Live single trigger fits extreme value(s)' + ) + + +def run_and_error(command_arguments, controls): + """ + Wrapper around subprocess.run to catch errors and send emails if 
required + """ + logging.info("Running " + " ".join(command_arguments)) + command_output = subprocess.run(command_arguments, capture_output=True) + if command_output.returncode: + error_contents = [' '.join(command_arguments), + command_output.stderr.decode()] + mail_volunteers_error( + controls, + error_contents, + f"PyCBC live could not run {command_arguments[0]}" + ) + err_msg = f"Could not run {command_arguments[0]}" + raise subprocess.SubprocessError(err_msg) + + +# These are the options used to control the supervision, and will not be passed +# to the subprocesses +control_options = [ + "check-daily-output", + "combined-days", + "mail-volunteers-file", + "output-directory", + "output-id-str", + "public-dir", + "replay-duration", + "replay-start-time", + "submit-dir", + "trfits-format", + "true-start-time", + "variable-trigger-fits", +] + +# These are options which can be taken by both the daily fit code and the +# combined fitting code +options_both = ['ifos', 'verbose'] + +# These options are only for the daily fit code +daily_fit_options = [ + 'cluster', + 'date-directories', + 'duration-bin-edges', + 'duration-bin-spacing', + 'duration-from-bank', + 'file-identifier', + 'fit-function', + 'fit-threshold', + 'num-duration-bins', + 'prune-loudest', + 'prune-stat-threshold', + 'prune-window', + 'sngl-ranking', + 'template-cuts', + 'trigger-cuts', + 'trigger-directory', +] + +combined_fit_options = [ + 'conservative-percentile', +] + +coeff_test_options = [ + 'duration-bin-lower-limit', + 'lower-limit-coefficient', + 'upper-limit-coefficient', +] + +all_options = control_options + options_both + daily_fit_options \ + + combined_fit_options + coeff_test_options + + +def do_fitting(args, day_dt, day_str): + """ + Perform the fits as specified + """ + # Read in the config file and pack into appropriate dictionaries + daily_options = {} + combined_options = {} + test_options = {} + controls = {} + + with open(args.config_file, 'r') as conf_file: + all_lines = conf_file.readlines() + + for line in all_lines: + # Ignore whitespace and comments + line = line.strip() + if not line: + continue + if line.startswith(';'): + continue + + option, value = line.split('=') + option = option.strip() + value = value.strip() + + # If it is a control option, add to the controls dictionary + if option in control_options: + controls[option] = value + + # If the option is not to control the input, then it is passed + # straight to the executable + if option in daily_fit_options or option in options_both: + daily_options[option] = value + + if option in options_both or option in combined_fit_options: + combined_options[option] = value + + if option in coeff_test_options: + test_options[option] = value + + if option not in all_options: + logging.warning("Option %s unrecognised, ignoring", option) + + # The main output directory will have a date subdirectory which we + # put the output into + output_dir = os.path.join(controls['output-directory'], day_str) + subprocess.run(['mkdir', '-p', output_dir]) + if 'public-dir' in controls: + public_dir = os.path.join(controls['public-dir'], *day_str.split('_')) + subprocess.run(['mkdir', '-p', public_dir]) + + if not args.combine_only: + ##### DAILY FITTING ##### + file_id_str = f'{day_str}' + if 'output-id-str' in controls: + file_id_str += f"-{controls['output-id-str']}" + out_fname = f'{file_id_str}-TRIGGER-FITS.hdf' + daily_options['output'] = os.path.join(output_dir, out_fname) + daily_args = ['pycbc_live_single_significance_fits'] + 
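As a worked example of the dict_to_args helper defined earlier in this file (the option values here are hypothetical, not from a real supervision config):

    opts = {'ifos': 'H1 L1', 'fit-function': 'exponential', 'verbose': ''}
    dict_to_args(opts)
    # -> ['--ifos', 'H1', 'L1', '--fit-function', 'exponential', '--verbose']

A multi-word value is split into separate argv tokens and an empty value becomes a bare flag, which is how the parsed config options are forwarded to pycbc_live_single_significance_fits just below.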
daily_options['gps-start-time'] = f'{gpstime.utc_to_gps(day_dt).gpsSeconds:d}' + daily_options['gps-end-time'] = f'{gpstime.utc_to_gps(day_dt + timedelta(days=1)).gpsSeconds:d}' + daily_args += dict_to_args(daily_options) + + run_and_error(daily_args, controls) + + # Add plotting for daily fits, and linking to the public directory + logging.info("Plotting daily fits") + daily_plot_output = os.path.join(output_dir, + '{ifo}-' + f'{out_fname[:-3]}png') + daily_plot_arguments = [ + 'pycbc_live_plot_single_significance_fits', + '--trigger-fits-file', + daily_options['output'], + '--output-plot-name-format', + daily_plot_output, + '--log-colormap' + ] + run_and_error(daily_plot_arguments, controls) + + # Link the plots to the public-dir if wanted + if 'public-dir' in controls: + daily_plot_outputs = [daily_plot_output.format(ifo=ifo) for ifo in + daily_options['ifos'].split()] + logging.info("Linking daily fits plots") + for dpo in daily_plot_outputs: + symlink(dpo, public_dir) + + if args.daily_only: + if 'check-daily-output' in controls: + logging.info( + "Checking that fit coefficients above %s for bins above %ss", + test_options['lower-limit-coefficient'], + test_options['duration-bin-lower-limit'] + ) + check_trigger_files( + [daily_options['output']], + test_options, + controls + ) + logging.info('Done') + exit() + + ##### COMBINED FITTING ##### + combined_days = int(controls['combined-days']) + if 'replay-start-time' in controls: + replay_start_time = int(controls['replay-start-time']) + true_start_time = int(controls['true-start-time']) + replay_duration = int(controls['replay-duration']) + dt_replay_start = gpstime.gps_to_utc(replay_start_time) + + td = (day_dt - dt_replay_start).total_seconds() + + # Time since the start of this replay + time_since_replay = np.remainder(td, replay_duration) + + # Add this on to the original start time to get the current time of + # the replay data + dt_true_start = gpstime.gps_to_utc(true_start_time) + + # Original time of the data being replayed right now + current_date = dt_true_start + timedelta(seconds=time_since_replay) + else: + current_date = day_dt + + date_test = current_date + timedelta(days=1) + + logging.info("Finding trigger fit files for combination") + if 'check-daily-output' in controls: + logging.info( + "Checking all files that fit coefficients above %s for bins " + "above %ss", + test_options['lower-limit-coefficient'], + test_options['duration-bin-lower-limit'] + ) + + trfits_files = [] + missed_files = 0 + found_files = 0 + while found_files < combined_days and missed_files < 10: + # Loop through the possible file locations and see if the file exists + date_test -= timedelta(days=1) + date_out = date_test.strftime("%Y_%m_%d") + trfits_filename = controls['trfits-format'].format(date=date_out) + # Check that the file exists: + if not os.path.exists(trfits_filename): + missed_files += 1 + logging.info(f"File {trfits_filename} does not exist - skipping") + continue + if not len(trfits_files): + end_date = date_out + # This is now the oldest file + first_date = date_out + # reset the "missed files" counter, and add to the "found files" + missed_files = 0 + found_files += 1 + trfits_files.append(trfits_filename) + + if 'check-daily-output' in controls: + check_trigger_files(trfits_files, test_options, controls) + + if missed_files == 10: + # If more than 10 days between files, something wrong with analysis. 
+ # warn and use fewer files - 10 here is chosen to be an unusual amount + # of time for the analysis to be down in standard operation + logging.warning('More than 10 days between files, only using ' + f'{found_files} files for combination!') + + file_id_str = f'{first_date}-{end_date}' + if 'output-id-str' in controls: + file_id_str += f"-{controls['output-id-str']}" + out_fname = f'{file_id_str}-TRIGGER_FITS_COMBINED' + combined_options['output'] = os.path.join(output_dir, out_fname + '.hdf') + + if not trfits_files: + raise ValueError("No files meet the criteria") + + combined_options['trfits-files'] = ' '.join(trfits_files) + + combined_args = ['pycbc_live_combine_single_significance_fits'] + combined_args += dict_to_args(combined_options) + + run_and_error(combined_args, controls) + + if 'variable-trigger-fits' in controls: + logging.info('Copying combined fits file to local filesystem') + try: + shutil.copyfile( + combined_options['output'], + controls['variable-trigger-fits'] + ) + except Exception as e: + mail_volunteers_error( + controls, + [str(e)], + "PyCBC live could not copy to variable trigger fits file" + ) + raise e + logging.info( + "%s updated to link to %s", + controls['variable-trigger-fits'], + combined_options['output'] + ) + + logging.info("Plotting combined fits") + # Add plotting for combined fits, and linking to the public directory + combined_plot_output = os.path.join(output_dir, + f"{{ifo}}-{out_fname}-{{type}}.png") + combined_plot_arguments = [ + 'pycbc_live_plot_combined_single_significance_fits', + '--combined-fits-file', + combined_options['output'], + '--output-plot-name-format', + combined_plot_output, + '--log-colormap' + ] + + run_and_error(combined_plot_arguments, controls) + + combined_plot_outputs = [ + combined_plot_output.format(ifo=ifo, type='fit_coeffs') for ifo in + combined_options['ifos'].split() + ] + combined_plot_outputs += [ + combined_plot_output.format(ifo=ifo, type='counts') for ifo in + combined_options['ifos'].split() + ] + + # Link the plots to the public-dir if wanted + if 'public-dir' in controls: + logging.info("Linking combined fits") + for cpo in combined_plot_outputs: + symlink(cpo, public_dir) + + logging.info('Done') + + +def wait_for_utc_time(target_str): + """Wait until the UTC time is as given by `target_str`, in HH:MM:SS format. + """ + target_hour, target_minute, target_second = map(int, target_str.split(':')) + now = datetime.utcnow() + # for today's target, take now and replace the time + target_today = now + relativedelta( + hour=target_hour, minute=target_minute, second=target_second + ) + # for tomorrow's target, take now, add one day, and replace the time + target_tomorrow = now + relativedelta( + days=1, hour=target_hour, minute=target_minute, second=target_second + ) + next_target = target_today if now <= target_today else target_tomorrow + sleep_seconds = (next_target - now).total_seconds() + logging.info('Waiting %.0f s', sleep_seconds) + time.sleep(sleep_seconds) + + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('--config-file', required=True) +parser.add_argument( + '--date', + help='Date to analyse, if not given, will analyse yesterday (UTC). ' + 'Format YYYY_MM_DD. Do not use if using --run-daily-at.' +) +parser.add_argument( + '--combine-only', + action='store_true', + help="Only do the combination of singles fit files." +) +parser.add_argument( + '--daily-only', + action='store_true', + help="Only do the daily singles fitting." 
+) +parser.add_argument( + '--run-daily-at', + metavar='HH:MM:SS', + help='Stay running and repeat the fitting daily at the given UTC hour.' +) +args = parser.parse_args() + +pycbc.init_logging(True) + +if args.run_daily_at is not None and args.date is not None: + parser.error('Cannot take --run-daily-at and --date at the same time') + +if args.run_daily_at is not None: + # keep running and repeat the fitting every day at the given hour + if not re.match('[0-9][0-9]:[0-9][0-9]:[0-9][0-9]', args.run_daily_at): + parser.error('--run-daily-at takes a UTC time in the format HH:MM:SS') + logging.info('Starting in daily run mode') + while True: + wait_for_utc_time(args.run_daily_at) + logging.info('==== Time to update the single fits, waking up ====') + # Get the date string for yesterday's triggers + day_dt = datetime.utcnow() - timedelta(days=1) + day_str = day_dt.strftime('%Y_%m_%d') + do_fitting(args, day_dt, day_str) +else: + # run just once + if args.date: + day_str = args.date + day_dt = datetime.strptime(args.date, '%Y_%m_%d') + else: + # Get the date string for yesterday's triggers + day_dt = datetime.utcnow() - timedelta(days=1) + day_str = day_dt.strftime('%Y_%m_%d') + do_fitting(args, day_dt, day_str) diff --git a/bin/minifollowups/pycbc_foreground_minifollowup b/bin/minifollowups/pycbc_foreground_minifollowup index b1ce86d1a7a..68d5c392dcc 100644 --- a/bin/minifollowups/pycbc_foreground_minifollowup +++ b/bin/minifollowups/pycbc_foreground_minifollowup @@ -30,9 +30,11 @@ from pycbc.events import select_segments_by_definer, coinc from pycbc.io import get_all_subkeys, HFile import pycbc.workflow.minifollowups as mini from pycbc.workflow.core import resolve_url_to_file +import pycbc.version parser = argparse.ArgumentParser(description=__doc__[1:]) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--statmap-file', diff --git a/bin/minifollowups/pycbc_injection_minifollowup b/bin/minifollowups/pycbc_injection_minifollowup index 871b81f51ab..e2a1557484b 100644 --- a/bin/minifollowups/pycbc_injection_minifollowup +++ b/bin/minifollowups/pycbc_injection_minifollowup @@ -26,6 +26,7 @@ import numpy from pycbc import init_logging, add_common_pycbc_options import pycbc.workflow as wf import pycbc.workflow.minifollowups as mini +import pycbc.version from pycbc.types import MultiDetOptionAction from pycbc.events import select_segments_by_definer, coinc from pycbc.results import layout @@ -121,6 +122,7 @@ def sort_injections(args, inj_group, missed): parser = argparse.ArgumentParser(description=__doc__) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--injection-file', diff --git a/bin/minifollowups/pycbc_page_coincinfo b/bin/minifollowups/pycbc_page_coincinfo index b2dc9e363b7..cdfbbecf784 100644 --- a/bin/minifollowups/pycbc_page_coincinfo +++ b/bin/minifollowups/pycbc_page_coincinfo @@ -27,14 +27,17 @@ import numpy import lal from pycbc import add_common_pycbc_options +import pycbc.version import pycbc.results import pycbc.pnutils from pycbc.io.hdf import HFile -from pycbc.events import ranking, stat as pystat +from pycbc.events import ranking from pycbc.results import followup parser = argparse.ArgumentParser() add_common_pycbc_options(parser) 
+parser.add_argument('--version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trigger-files', nargs='+', help="HDF format single detector trigger files for the full data run") parser.add_argument('--bank-file', @@ -69,22 +72,11 @@ parser.add_argument('--include-summary-page-link', action='store_true', parser.add_argument('--include-gracedb-link', action='store_true', help="If given, will provide a link to search GraceDB for events " "within a 3s window around the coincidence time.") -parser.add_argument('--max-columns', type=int, - help="Maximum number of columns allowed in the table (not including detector names)") -pystat.insert_statistic_option_group(parser, - default_ranking_statistic='single_ranking_only') + args = parser.parse_args() pycbc.init_logging(args.verbose) -if args.ranking_statistic not in ['quadsum', 'single_ranking_only']: - logging.warning( - "For the coincident info table, we only use single ranking, not %s, " - "this option will be ignored", - args.ranking_statistic - ) - args.ranking_statistic = 'quadsum' - # Get the nth loudest trigger from the output of pycbc_coinc_statmap f = HFile(args.statmap_file, 'r') d = f[args.statmap_file_subspace_name] @@ -157,16 +149,12 @@ statmapfile = d # table. Each entry in data corresponds to each row in the final table and # should be a list of values. So data will be a list of lists. data = [] -row_labels = [] -rank_method = pystat.get_statistic_from_opts(args, list(files.keys())) - for ifo in files.keys(): # ignore ifo if coinc didn't participate (only for multi-ifo workflow) if (statmapfile['%s/time' % ifo][n] == -1.0): continue - row_labels.append(ifo) d = files[ifo] i = idx[ifo] tid = d['template_id'][i] @@ -176,12 +164,7 @@ for ifo in files.keys(): time = d['end_time'][i] utc = lal.GPSToUTC(int(time))[0:6] - trig_dict = { - k: numpy.array([d[k][i]]) - for k in d.keys() - if not k.endswith('_template') - and k not in ['gating', 'search', 'template_boundaries'] - } + # Headers will store the headers that will appear in the table.
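For orientation, pycbc.results.static_table consumes headers as a flat list of column titles and data as one inner list of cell strings per row; a hypothetical two-detector example of the structure being built here:

    headers = ['Ifo', 'UTC End Time', 'ρ']
    data = [['H1', '2017-08-17 12:41:04', '18.52'],
            ['L1', '2017-08-17 12:41:04', '26.41']]
    html = str(pycbc.results.static_table(data, headers))

Each pass through the surrounding detector loop appends one such inner list.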
headers = [] data.append([]) @@ -190,6 +173,9 @@ for ifo in files.keys(): if args.include_summary_page_link: data[-1].append(pycbc.results.dq.get_summary_page_link(ifo, utc)) headers.append("Detector status") + else: + data[-1].append(ifo) + headers.append("Ifo") # End times data[-1].append(str(datetime.datetime(*utc))) @@ -197,28 +183,14 @@ for ifo in files.keys(): headers.append("UTC End Time") headers.append("GPS End time") - #headers.append("Stat") - # Determine statistic naming - if args.sngl_ranking == "newsnr": - sngl_stat_name = "Reweighted SNR" - elif args.sngl_ranking == "newsnr_sgveto": - sngl_stat_name = "Reweighted SNR (+sgveto)" - elif args.sngl_ranking == "newsnr_sgveto_psdvar": - sngl_stat_name = "Reweighted SNR (+sgveto+psdvar)" - elif args.sngl_ranking == "snr": - sngl_stat_name = "SNR" - else: - sngl_stat_name = args.sngl_ranking - - stat = rank_method.get_sngl_ranking(trig_dict) - headers.append(sngl_stat_name) - data[-1].append('%5.2f' % stat[0]) - # SNR and phase (not showing any single-det stat here) data[-1].append('%5.2f' % d['snr'][i]) data[-1].append('%5.2f' % d['coa_phase'][i]) + #data[-1].append('%5.2f' % ranking.newsnr(d['snr'][i], rchisq)) headers.append("ρ") headers.append("Phase") + #headers.append("Stat") + # Signal-glitch discrimators data[-1].append('%5.2f' % rchisq) data[-1].append('%i' % d['chisq_dof'][i]) @@ -249,12 +221,7 @@ for ifo in files.keys(): headers.append("s2z") headers.append("Duration") -html += str(pycbc.results.static_table( - data, - headers, - columns_max=args.max_columns, - row_labels=row_labels -)) +html += str(pycbc.results.static_table(data, headers)) ############################################################################### pycbc.results.save_fig_with_metadata(html, args.output_file, {}, diff --git a/bin/minifollowups/pycbc_page_injinfo b/bin/minifollowups/pycbc_page_injinfo index e4bf1cfe8e8..f7854e477cb 100644 --- a/bin/minifollowups/pycbc_page_injinfo +++ b/bin/minifollowups/pycbc_page_injinfo @@ -19,8 +19,8 @@ import argparse import sys import numpy +import pycbc.version import pycbc.results -from pycbc import conversions as conv import pycbc.pnutils from pycbc import init_logging, add_common_pycbc_options from pycbc.detector import Detector @@ -28,6 +28,8 @@ from pycbc.io.hdf import HFile parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file') parser.add_argument('--injection-file', required=True, help="The HDF format injection file. Required") @@ -35,8 +37,6 @@ parser.add_argument('--injection-index', type=int, required=True, help="The index of the injection to print out. 
Required") parser.add_argument('--n-nearest', type=int, help="Optional, used in the title") -parser.add_argument('--max-columns', type=int, - help="Optional, maximum number of columns used for the table") args = parser.parse_args() @@ -68,24 +68,13 @@ labels = { 'spin1z': 's1z', 'spin2x': 's2x', 'spin2y': 's2y', - 'spin2z': 's2z', - 'chieff': 'χeff', - 'chip': 'χp', + 'spin2z': 's2z' } params += ['tc'] m1, m2 = f['injections']['mass1'][iidx], f['injections']['mass2'][iidx] -s1x, s2x = f['injections']['spin1x'][iidx], f['injections']['spin2x'][iidx] -s1y, s2y = f['injections']['spin1y'][iidx], f['injections']['spin2y'][iidx] -s1z, s2z = f['injections']['spin1z'][iidx], f['injections']['spin2z'][iidx] - -derived = {} -derived['mchirp'], derived['eta'] = \ - pycbc.pnutils.mass1_mass2_to_mchirp_eta(m1, m2) -derived['mtotal'] = conv.mtotal_from_mass1_mass2(m1, m2) -derived['chieff'] = conv.chi_eff(m1, m2, s1z, s2z) -derived['chip'] = conv.chi_p(m1, m2, s1x, s1y, s2x, s2y) +mchirp, eta = pycbc.pnutils.mass1_mass2_to_mchirp_eta(m1, m2) if 'optimal_snr' in ' '.join(list(f['injections'].keys())): ifolist = f.attrs['ifos'].split(' ') @@ -96,30 +85,32 @@ else: eff_dist = {} for ifo in ['H1', 'L1', 'V1']: eff_dist[ifo] = Detector(ifo).effective_distance( - f['injections/distance'][iidx], - f['injections/ra'][iidx], - f['injections/dec'][iidx], - f['injections/polarization'][iidx], - f['injections/tc'][iidx], - f['injections/inclination'][iidx] - ) + f['injections/distance'][iidx], + f['injections/ra'][iidx], + f['injections/dec'][iidx], + f['injections/polarization'][iidx], + f['injections/tc'][iidx], + f['injections/inclination'][iidx]) + params += ['dec_chirp_dist', 'eff_dist_h', 'eff_dist_l', 'eff_dist_v'] dec_dist = max(eff_dist['H1'], eff_dist['L1']) dec_chirp_dist = pycbc.pnutils.chirp_distance(dec_dist, mchirp) params += ['mass1', 'mass2', 'mchirp', 'eta', 'ra', 'dec', 'inclination', 'spin1x', 'spin1y', 'spin1z', 'spin2x', 'spin2y', - 'spin2z', 'chieff', 'chip'] + 'spin2z'] for p in params: if p in f['injections']: data += ["%.2f" % f['injections'][p][iidx]] - elif p in derived.keys(): - data += [f'{derived[p]:.2f}'] elif 'eff_dist' in p: ifo = '%s1' % p.split('_')[-1] data += ["%.2f" % eff_dist[ifo.upper()]] + elif p == 'mchirp': + data += ["%.2f" % mchirp] + elif p == 'eta': + data += ["%.2f" % eta] elif p == 'dec_chirp_dist': data += ["%.2f" % dec_chirp_dist] else: @@ -129,7 +120,7 @@ for p in params: headers += [labels[p]] table = numpy.array([data], dtype=str) -html = str(pycbc.results.static_table(table, headers, columns_max=args.max_columns)) +html = str(pycbc.results.static_table(table, headers)) tag = '' if args.n_nearest is not None: diff --git a/bin/minifollowups/pycbc_page_snglinfo b/bin/minifollowups/pycbc_page_snglinfo index 2ce04edbcc1..ff5eed58428 100644 --- a/bin/minifollowups/pycbc_page_snglinfo +++ b/bin/minifollowups/pycbc_page_snglinfo @@ -26,7 +26,7 @@ matplotlib.use('Agg') import lal -import pycbc.events, pycbc.results, pycbc.pnutils +import pycbc.version, pycbc.events, pycbc.results, pycbc.pnutils from pycbc.results import followup from pycbc.events import stat as pystat from pycbc.io import hdf @@ -35,6 +35,8 @@ from pycbc import init_logging, add_common_pycbc_options parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trigger-file', required=True, help="HDF format single detector trigger files for the full " "data run") @@ -68,8 
+70,6 @@ parser.add_argument('--include-gracedb-link', action='store_true', parser.add_argument('--significance-file', help="If given, will search for this trigger's id in the file to see if " "stat and p_astro values exists for this trigger.") -parser.add_argument('--max-columns', type=int, - help="Optional. Set a maximum number of columns to be used in the output table") pystat.insert_statistic_option_group(parser, default_ranking_statistic='single_ranking_only') @@ -119,7 +119,7 @@ if args.n_loudest is not None: sngl_file.apply_mask(l[0]) # make a table for the single detector information ############################ -time = sngl_file.end_time[0] +time = sngl_file.end_time utc = lal.GPSToUTC(int(time))[0:6] # Headers here will contain the list of headers that will appear in the @@ -131,8 +131,6 @@ headers = [] # single list that will hold the values to go into the table. data = [[]] -row_labels = [args.instrument] - # DQ summary link if args.include_summary_page_link: data[0].append(pycbc.results.dq.get_summary_page_link(args.instrument, utc)) @@ -145,10 +143,11 @@ headers.append("UTC") headers.append("End time") # SNR and statistic +data[0].append('%5.2f' % sngl_file.snr) +data[0].append('%5.2f' % sngl_file.get_column('coa_phase')) +data[0].append('%5.2f' % stat) headers.append("ρ") -data[0].append('%5.2f' % sngl_file.snr[0]) headers.append("Phase") -data[0].append('%5.2f' % sngl_file.get_column('coa_phase')[0]) # Determine statistic naming if args.sngl_ranking == "newsnr": sngl_stat_name = "Reweighted SNR" @@ -172,31 +171,30 @@ else: ) headers.append(stat_name) -data[0].append('%5.2f' % stat[0]) # Signal-glitch discrimators -data[0].append('%5.2f' % sngl_file.rchisq[0]) -data[0].append('%i' % sngl_file.get_column('chisq_dof')[0]) +data[0].append('%5.2f' % sngl_file.rchisq) +data[0].append('%i' % sngl_file.get_column('chisq_dof')) headers.append("χ2r") headers.append("χ2 bins") try: - data[0].append('%5.2f' % sngl_file.sgchisq[0]) + data[0].append('%5.2f' % sngl_file.sgchisq) headers.append("sgχ2") except: pass try: - data[0].append('%5.2f' % sngl_file.psd_var_val[0]) + data[0].append('%5.2f' % sngl_file.psd_var_val) headers.append("PSD var") except: pass # Template parameters -data[0].append('%5.2f' % sngl_file.mass1[0]) -data[0].append('%5.2f' % sngl_file.mass2[0]) -data[0].append('%5.2f' % sngl_file.mchirp[0]) -data[0].append('%5.2f' % sngl_file.spin1z[0]) -data[0].append('%5.2f' % sngl_file.spin2z[0]) -data[0].append('%5.2f' % sngl_file.template_duration[0]) +data[0].append('%5.2f' % sngl_file.mass1) +data[0].append('%5.2f' % sngl_file.mass2) +data[0].append('%5.2f' % sngl_file.mchirp) +data[0].append('%5.2f' % sngl_file.spin1z) +data[0].append('%5.2f' % sngl_file.spin2z) +data[0].append('%5.2f' % sngl_file.template_duration) headers.append("m1") headers.append("m2") headers.append("Mc") @@ -225,7 +223,7 @@ if args.include_gracedb_link: data[0].append(gdb_search_link) html = pycbc.results.dq.redirect_javascript + \ - str(pycbc.results.static_table(data, headers, row_labels=row_labels, columns_max=args.max_columns)) + str(pycbc.results.static_table(data, headers)) ############################################################################### # Set up default titles and the captions for the file diff --git a/bin/minifollowups/pycbc_plot_trigger_timeseries b/bin/minifollowups/pycbc_plot_trigger_timeseries index e70bad54865..b0506797c65 100644 --- a/bin/minifollowups/pycbc_plot_trigger_timeseries +++ b/bin/minifollowups/pycbc_plot_trigger_timeseries @@ -26,6 +26,7 @@ import pylab 
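These info pages all convert trigger GPS times with the same LAL idiom; a small self-contained sketch (the GPS time is GW170817's merger time, used purely as an example):

    import datetime
    import lal

    utc = lal.GPSToUTC(1187008882)[0:6]   # (2017, 8, 17, 12, 41, 4)
    stamp = str(datetime.datetime(*utc))  # '2017-08-17 12:41:04'

lal.GPSToUTC returns a struct_time-style nine-field tuple, so slicing the first six fields feeds datetime.datetime directly.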
import numpy from pycbc import init_logging, add_common_pycbc_options +import pycbc.version import pycbc.results from pycbc.types import MultiDetOptionAction from pycbc.events import ranking @@ -33,6 +34,8 @@ from pycbc.io import HFile, SingleDetTriggers parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trigger-files', nargs='+', action=MultiDetOptionAction, metavar="IFO:FILE", help="The HDF format single detector merged trigger files, in " diff --git a/bin/minifollowups/pycbc_single_template_plot b/bin/minifollowups/pycbc_single_template_plot index f983368ae6c..0673fc1b2a7 100644 --- a/bin/minifollowups/pycbc_single_template_plot +++ b/bin/minifollowups/pycbc_single_template_plot @@ -29,6 +29,8 @@ from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--single-template-file', required=True, help="HDF file containing the SNR and CHISQ timeseries. " " The output of pycbc_single_template") diff --git a/bin/minifollowups/pycbc_sngl_minifollowup b/bin/minifollowups/pycbc_sngl_minifollowup index ea65cfb97c7..206066288ea 100644 --- a/bin/minifollowups/pycbc_sngl_minifollowup +++ b/bin/minifollowups/pycbc_sngl_minifollowup @@ -30,14 +30,16 @@ from pycbc.results import layout from pycbc.types.optparse import MultiDetOptionAction from pycbc.events import select_segments_by_definer import pycbc.workflow.minifollowups as mini +import pycbc.version import pycbc.workflow as wf import pycbc.events from pycbc.workflow.core import resolve_url_to_file -from pycbc.events import stat, veto +from pycbc.events import stat, veto, coinc from pycbc.io import hdf parser = argparse.ArgumentParser(description=__doc__[1:]) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") parser.add_argument('--single-detector-file', @@ -71,8 +73,7 @@ parser.add_argument('--inspiral-data-analyzed-name', "analyzed by each analysis job.") parser.add_argument('--min-sngl-ranking', type=float, default=6.5, help="Minimum sngl-ranking to consider for loudest " - "triggers. Useful for efficiency savings. " - "Default=6.5.") + "triggers. 
Default=6.5.") parser.add_argument('--non-coinc-time-only', action='store_true', help="If given remove (veto) single-detector triggers " "that occur during a time when at least one other " @@ -176,7 +177,7 @@ if args.veto_file: logging.info('Getting file vetoes') # veto_mask is an array of indices into the trigger arrays # giving the surviving triggers - veto_file_idx, _ = pycbc.events.veto.indices_within_segments( + veto_file_idx, _ = events.veto.indices_within_segments( trigs.end_time, [args.veto_file], ifo=args.instrument, @@ -238,32 +239,48 @@ if args.maximum_duration is not None: logging.info('Finding loudest clustered events') rank_method = stat.get_statistic_from_opts(args, [args.instrument]) -extra_kwargs = stat.parse_statistic_keywords_opt(args.statistic_keywords) - -trigs.mask_to_n_loudest_clustered_events( - rank_method, - n_loudest=num_events, - cluster_window=args.cluster_window, - statistic_kwargs=extra_kwargs, -) +extra_kwargs = {} +for inputstr in args.statistic_keywords: + try: + key, value = inputstr.split(':') + extra_kwargs[key] = value + except ValueError: + err_txt = "--statistic-keywords must take input in the " \ + "form KWARG1:VALUE1 KWARG2:VALUE2 KWARG3:VALUE3 ... " \ + "Received {}".format(args.statistic_keywords) + raise ValueError(err_txt) + +logging.info("Calculating statistic for %d triggers", len(trigs.snr)) +sds = rank_method.single(trigs) +stat = rank_method.rank_stat_single((args.instrument, sds), **extra_kwargs) +logging.info("Clustering events over %.3fs window", args.cluster_window) +cid = coinc.cluster_over_time(stat, trigs.end_time, + args.cluster_window) +trigs.apply_mask(cid) +stat = stat[cid] +if len(trigs.snr) < num_events: + num_events = len(trigs.snr) + +logging.info("Finding the loudest triggers") +loudest_idx = sorted(numpy.argsort(stat)[::-1][:num_events]) +trigs.apply_mask(loudest_idx) +stat = stat[loudest_idx] times = trigs.end_time +tids = trigs.template_id -if isinstance(trigs.mask, numpy.ndarray) and trigs.mask.dtype == bool: - trigger_ids = numpy.flatnonzero(trigs.mask) -else: - trigger_ids = trigs.mask - -trig_stat = trigs.stat # loop over number of loudest events to be followed up -order = trig_stat.argsort()[::-1] +order = stat.argsort()[::-1] for rank, num_event in enumerate(order): - logging.info('Processing event: %s', rank) + logging.info('Processing event: %s', num_event) files = wf.FileList([]) time = times[num_event] ifo_time = '%s:%s' %(args.instrument, str(time)) - tid = trigger_ids[num_event] + if isinstance(trigs.mask, numpy.ndarray) and trigs.mask.dtype == bool: + tid = numpy.flatnonzero(trigs.mask)[num_event] + else: + tid = trigs.mask[num_event] ifo_tid = '%s:%s' %(args.instrument, str(tid)) layouts += (mini.make_sngl_ifo(workflow, sngl_file, tmpltbank_file, diff --git a/bin/minifollowups/pycbc_upload_prep_minifollowup b/bin/minifollowups/pycbc_upload_prep_minifollowup index 737cd6e5130..08fc60839bf 100644 --- a/bin/minifollowups/pycbc_upload_prep_minifollowup +++ b/bin/minifollowups/pycbc_upload_prep_minifollowup @@ -33,9 +33,11 @@ from pycbc.events import select_segments_by_definer, coinc from pycbc.io import get_all_subkeys, HFile import pycbc.workflow.minifollowups as mini from pycbc.workflow.core import resolve_url_to_file, resolve_td_option +import pycbc.version parser = argparse.ArgumentParser(description=__doc__[1:]) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help="HDF format template bank file") 
parser.add_argument('--statmap-file', diff --git a/bin/plotting/pycbc_banksim_plot_eff_fitting_factor b/bin/plotting/pycbc_banksim_plot_eff_fitting_factor index 4e1d5abc5c2..41230b463fc 100644 --- a/bin/plotting/pycbc_banksim_plot_eff_fitting_factor +++ b/bin/plotting/pycbc_banksim_plot_eff_fitting_factor @@ -40,6 +40,7 @@ __program__ = "pycbc_banksim_plot_eff_fitting_factor" parser = argparse.ArgumentParser(usage='', description=__doc__) add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) parser.add_argument('--input-files', nargs='+', default=None, required=True, help="List of input files.") parser.add_argument('--output-file', default=None, required=True, diff --git a/bin/plotting/pycbc_banksim_plot_fitting_factors b/bin/plotting/pycbc_banksim_plot_fitting_factors index 49d78839d2f..32d748c15e4 100644 --- a/bin/plotting/pycbc_banksim_plot_fitting_factors +++ b/bin/plotting/pycbc_banksim_plot_fitting_factors @@ -38,6 +38,7 @@ __program__ = "pycbc_banksim_plot_fitting_factors" parser = argparse.ArgumentParser(usage='', description="Plot fitting factor distribution.") pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) parser.add_argument('--input-file', default=None, required=True, help="List of input files.") parser.add_argument('--output-file', default=None, required=True, diff --git a/bin/plotting/pycbc_banksim_table_point_injs b/bin/plotting/pycbc_banksim_table_point_injs index d91957aac7a..8aef36e645d 100644 --- a/bin/plotting/pycbc_banksim_table_point_injs +++ b/bin/plotting/pycbc_banksim_table_point_injs @@ -34,6 +34,7 @@ __program__ = "pycbc_banksim_table_point_injs" parser = argparse.ArgumentParser(usage='', description="Plot effective fitting factor vs mass1 and mass2.") pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) parser.add_argument('--input-files', nargs='+', default=None, required=True, help="List of input files.") parser.add_argument('--directory-links', nargs='+', default=None, diff --git a/bin/plotting/pycbc_create_html_snippet b/bin/plotting/pycbc_create_html_snippet index 47b046eb90a..f7b86dfb9ed 100644 --- a/bin/plotting/pycbc_create_html_snippet +++ b/bin/plotting/pycbc_create_html_snippet @@ -26,6 +26,7 @@ import pycbc.results # parse command line parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file', type=str, help='Path of the output HTML file.') parser.add_argument('--html-text', type=str, diff --git a/bin/plotting/pycbc_ifar_catalog b/bin/plotting/pycbc_ifar_catalog index 9ca1e597f22..a6e055b9eca 100644 --- a/bin/plotting/pycbc_ifar_catalog +++ b/bin/plotting/pycbc_ifar_catalog @@ -25,6 +25,7 @@ import pylab from scipy.stats import norm, poisson import pycbc.results +import pycbc.version from pycbc import conversions from pycbc import init_logging, add_common_pycbc_options from pycbc.io.hdf import HFile @@ -33,6 +34,8 @@ parser = argparse.ArgumentParser(usage='pycbc_ifar_catalog [--options]', description='Plots cumulative IFAR vs count for' ' foreground triggers') add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-files', nargs='+', help='Path to coincident trigger HDF file(s)') parser.add_argument('--output-file', required=True, diff --git 
a/bin/plotting/pycbc_page_coinc_snrchi b/bin/plotting/pycbc_page_coinc_snrchi index 6df693c3539..1f87e354612 100644 --- a/bin/plotting/pycbc_page_coinc_snrchi +++ b/bin/plotting/pycbc_page_coinc_snrchi @@ -10,6 +10,7 @@ from pycbc.io import ( ) from pycbc import conversions, init_logging, add_common_pycbc_options from pycbc.detector import Detector +import pycbc.version def snr_from_chisq(chisq, newsnr, q=6.): snr = numpy.zeros(len(chisq)) + float(newsnr) @@ -19,6 +20,8 @@ def snr_from_chisq(chisq, newsnr, q=6.): parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--found-injection-file', required=True, help='HDF format found injection file. Required') parser.add_argument('--single-injection-file', required=True, diff --git a/bin/plotting/pycbc_page_dq_table b/bin/plotting/pycbc_page_dq_table index e500ab9e038..db9fba5200d 100644 --- a/bin/plotting/pycbc_page_dq_table +++ b/bin/plotting/pycbc_page_dq_table @@ -8,9 +8,11 @@ import numpy as np import pycbc import pycbc.results +from pycbc.version import git_verbose_msg as version parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=version) parser.add_argument('--ifo', required=True) parser.add_argument('--dq-file', required=True) parser.add_argument('--output-file') diff --git a/bin/plotting/pycbc_page_foreground b/bin/plotting/pycbc_page_foreground index 0144db45e8f..dce5799ae01 100755 --- a/bin/plotting/pycbc_page_foreground +++ b/bin/plotting/pycbc_page_foreground @@ -11,12 +11,15 @@ import numpy import pycbc import pycbc.results +import pycbc.version from pycbc.io import hdf from pycbc.pnutils import mass1_mass2_to_mchirp_eta parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file', required=True) parser.add_argument('--bank-file', required=True) parser.add_argument('--single-detector-triggers', nargs='+') diff --git a/bin/plotting/pycbc_page_foundmissed b/bin/plotting/pycbc_page_foundmissed index f45c959553e..5e3e58e464a 100644 --- a/bin/plotting/pycbc_page_foundmissed +++ b/bin/plotting/pycbc_page_foundmissed @@ -10,7 +10,7 @@ import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plot -import pycbc.results.followup, pycbc.pnutils, pycbc.results +import pycbc.results.followup, pycbc.pnutils, pycbc.results, pycbc.version import pycbc.pnutils from pycbc import init_logging, add_common_pycbc_options from pycbc.detector import Detector @@ -64,6 +64,8 @@ parser.add_argument('--far-type', choices=('inclusive', 'exclusive'), parser.add_argument('--missed-on-top', action='store_true', help="Plot missed injections on top of found ones and " "high FAR on top of low FAR") +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) args = parser.parse_args() init_logging(args.verbose) diff --git a/bin/plotting/pycbc_page_ifar b/bin/plotting/pycbc_page_ifar index 851447507f4..fa092159e41 100644 --- a/bin/plotting/pycbc_page_ifar +++ b/bin/plotting/pycbc_page_ifar @@ -27,6 +27,7 @@ from ligo import segments from pycbc import init_logging, add_common_pycbc_options import pycbc.results +import pycbc.version from pycbc.events import veto from pycbc import conversions as conv from pycbc.io import HFile @@ -51,6 +52,8 
@@ parser = argparse.ArgumentParser(usage='pycbc_page_ifar [--options]', 'coincident foreground triggers and a subset of' 'the coincident time slide triggers.') add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file', type=str, required=True, help='Path to coincident trigger HDF file.') parser.add_argument('--output-file', type=str, required=True, diff --git a/bin/plotting/pycbc_page_injtable b/bin/plotting/pycbc_page_injtable index 5cafa75009c..ecfd4b98733 100644 --- a/bin/plotting/pycbc_page_injtable +++ b/bin/plotting/pycbc_page_injtable @@ -10,6 +10,7 @@ import pycbc.results import pycbc.detector import pycbc.pnutils import pycbc.events +import pycbc.version from pycbc.io.hdf import HFile from pycbc import add_common_pycbc_options, init_logging from pycbc.types import MultiDetOptionAction @@ -17,6 +18,8 @@ from pycbc.types import MultiDetOptionAction parser = argparse.ArgumentParser(description=__doc__) add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file', help='HDF File containing the matched injections') parser.add_argument('--single-trigger-files', nargs='*', diff --git a/bin/plotting/pycbc_page_recovery b/bin/plotting/pycbc_page_recovery index 2bc5ed05ace..a482a946fbf 100644 --- a/bin/plotting/pycbc_page_recovery +++ b/bin/plotting/pycbc_page_recovery @@ -5,13 +5,15 @@ import numpy, logging, argparse, sys, matplotlib matplotlib.use("Agg") import matplotlib.pyplot as plot -import pycbc.detector +import pycbc.version, pycbc.detector from pycbc import pnutils, results from pycbc.events import triggers from pycbc.io.hdf import HFile parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument("--injection-file", required=True, help="hdf injection file containing found injections. " "Required") diff --git a/bin/plotting/pycbc_page_segments b/bin/plotting/pycbc_page_segments index 9a3f92ad8a2..447812b9713 100644 --- a/bin/plotting/pycbc_page_segments +++ b/bin/plotting/pycbc_page_segments @@ -11,12 +11,15 @@ import mpld3 import mpld3.plugins from matplotlib.patches import Rectangle +import pycbc.version import pycbc.events from pycbc.results.mpld3_utils import MPLSlide, Tooltip parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--segment-files', nargs='+', help="List of segment files to plot") parser.add_argument('--output-file', help="output html file") diff --git a/bin/plotting/pycbc_page_segplot b/bin/plotting/pycbc_page_segplot index dd595a558b8..17172be0ba8 100644 --- a/bin/plotting/pycbc_page_segplot +++ b/bin/plotting/pycbc_page_segplot @@ -16,7 +16,7 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
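[Editor's note: the plotting tools touched in these hunks share one headless-plotting idiom: select the non-interactive Agg backend before pyplot is imported, so the executables run inside batch workflows with no display attached. A minimal self-contained sketch of that pattern:

import matplotlib
matplotlib.use('Agg')          # must happen before pyplot is imported
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig('example.png')     # writes a file; no GUI window is opened

End note.]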
-import argparse +import argparse, pycbc.version import matplotlib; matplotlib.use('Agg') import matplotlib.pyplot as plt import numpy, pylab, pycbc.events, mpld3, mpld3.plugins @@ -30,10 +30,12 @@ from pycbc.events.veto import get_segment_definer_comments from pycbc.results.color import ifo_color from pycbc.results.mpld3_utils import MPLSlide, LineTooltip from pycbc.workflow import SegFile +import pycbc.version # parse command line parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--segment-files', type=str, nargs="+", help='XML files with a segment definer table to read.') parser.add_argument('--segment-names', type=str, nargs="+", required=False, diff --git a/bin/plotting/pycbc_page_segtable b/bin/plotting/pycbc_page_segtable index 0d7b05b5cb3..08b0597d657 100644 --- a/bin/plotting/pycbc_page_segtable +++ b/bin/plotting/pycbc_page_segtable @@ -28,6 +28,7 @@ from ligo import segments from pycbc.events.veto import get_segment_definer_comments from pycbc.results import save_fig_with_metadata from pycbc.workflow import SegFile +import pycbc.version def powerset_ifos(ifo_set): combo_set = [] @@ -38,6 +39,8 @@ def powerset_ifos(ifo_set): # parse command line parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--segment-files', type=str, nargs="+", help='XML files with a segment definer table to read.') parser.add_argument('--segment-names', type=str, nargs="+", required=False, default="", diff --git a/bin/plotting/pycbc_page_sensitivity b/bin/plotting/pycbc_page_sensitivity index cad5146ec67..f210bed016b 100755 --- a/bin/plotting/pycbc_page_sensitivity +++ b/bin/plotting/pycbc_page_sensitivity @@ -13,12 +13,15 @@ import pylab import pycbc.pnutils import pycbc.results import pycbc +import pycbc.version from pycbc import sensitivity from pycbc import conversions as conv from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file', nargs='+', help="Required. 
HDF format injection result file or space " "separated list of files") diff --git a/bin/plotting/pycbc_page_snrchi b/bin/plotting/pycbc_page_snrchi index af7956ac1fb..9bd0f0aab5c 100644 --- a/bin/plotting/pycbc_page_snrchi +++ b/bin/plotting/pycbc_page_snrchi @@ -9,6 +9,7 @@ matplotlib.use('Agg') import pylab import pycbc.results +import pycbc.version from pycbc.events import veto from pycbc.io import ( get_chisq_from_file_choice, chisq_choices, SingleDetTriggers, HFile @@ -17,6 +18,8 @@ from pycbc.io import ( parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) parser.add_argument('--trigger-file', help='Single ifo trigger file') +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--veto-file', help='Optional, file of veto segments to remove triggers') parser.add_argument('--segment-name', default=None, type=str, diff --git a/bin/plotting/pycbc_page_snrifar b/bin/plotting/pycbc_page_snrifar index 26689724300..a3d3c3ada23 100644 --- a/bin/plotting/pycbc_page_snrifar +++ b/bin/plotting/pycbc_page_snrifar @@ -12,6 +12,7 @@ from scipy.special import erfc, erfinv from pycbc.io.hdf import HFile import pycbc.results +import pycbc.version from pycbc import conversions as conv def sigma_from_p(p): @@ -40,6 +41,8 @@ far_from_p = numpy.vectorize(_far_from_p) parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file') parser.add_argument('--output-file') parser.add_argument('--not-cumulative', action='store_true') diff --git a/bin/plotting/pycbc_page_snrratehist b/bin/plotting/pycbc_page_snrratehist index 5078698311e..59860a237d4 100755 --- a/bin/plotting/pycbc_page_snrratehist +++ b/bin/plotting/pycbc_page_snrratehist @@ -15,6 +15,7 @@ from scipy.special import erf, erfinv from pycbc.io.hdf import HFile import pycbc.results +import pycbc.version from pycbc import conversions as conv def sigma_from_p(p): @@ -27,6 +28,8 @@ def p_from_sigma(sig): parser = argparse.ArgumentParser() # General required options pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file') parser.add_argument('--output-file') parser.add_argument('--bin-size', type=float) diff --git a/bin/plotting/pycbc_page_template_bin_table b/bin/plotting/pycbc_page_template_bin_table index 7e4c3f662b8..68fec14ec6a 100644 --- a/bin/plotting/pycbc_page_template_bin_table +++ b/bin/plotting/pycbc_page_template_bin_table @@ -8,9 +8,11 @@ import numpy as np import pycbc import pycbc.results +from pycbc.version import git_verbose_msg as version parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=version) parser.add_argument('--ifo', required=True) parser.add_argument('--dq-file', required=True) parser.add_argument('--output-file') diff --git a/bin/plotting/pycbc_page_versioning b/bin/plotting/pycbc_page_versioning index b05ef9ef37e..74fb09d048a 100755 --- a/bin/plotting/pycbc_page_versioning +++ b/bin/plotting/pycbc_page_versioning @@ -9,13 +9,15 @@ pycbc results pages import argparse import logging -import pycbc +import pycbc.version from pycbc import init_logging, add_common_pycbc_options from pycbc.results import (save_fig_with_metadata, html_escape, get_library_version_info, get_code_version_numbers) 
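[Editor's note: the recurring change across these executables is restoring an explicit argparse --version option fed by pycbc.version.git_verbose_msg. A minimal sketch of the pattern; the string assigned here is a hypothetical stand-in, since the real git_verbose_msg is generated from the installed package's git metadata:

import argparse

# Hypothetical stand-in for pycbc.version.git_verbose_msg.
git_verbose_msg = "PyCBC Version: X.Y.Z, Branch: master, Hash: <hash>"

parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version=git_verbose_msg)
# Running `myprog --version` now prints the string above and exits 0.

End note.]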
parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--executables', nargs='+', required=True, help="List of executables to provide version " "information for") diff --git a/bin/plotting/pycbc_page_vetotable b/bin/plotting/pycbc_page_vetotable index 8e7a69a952f..8ae26f8d062 100644 --- a/bin/plotting/pycbc_page_vetotable +++ b/bin/plotting/pycbc_page_vetotable @@ -28,6 +28,7 @@ from ligo.lw import utils import pycbc.results from pycbc.results import save_fig_with_metadata +import pycbc.version from pycbc.io.ligolw import LIGOLWContentHandler @@ -35,6 +36,7 @@ parser = argparse.ArgumentParser(description=__doc__) # add command line options pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=pycbc.version.git_verbose_msg) parser.add_argument('--veto-definer-file', type=str, help='XML files with a veto_definer table to read.') parser.add_argument('--output-file', type=str, diff --git a/bin/plotting/pycbc_plot_bank_bins b/bin/plotting/pycbc_plot_bank_bins index ad19734c17e..740fbfdf40c 100644 --- a/bin/plotting/pycbc_plot_bank_bins +++ b/bin/plotting/pycbc_plot_bank_bins @@ -12,6 +12,7 @@ import inspect from itertools import cycle import pycbc.events, pycbc.pnutils, pycbc.conversions, pycbc.results +import pycbc.version class H5BankFile(h5py.File): @@ -78,6 +79,8 @@ class H5BankFile(h5py.File): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--bank-file', help='hdf format template bank file', required=True) parser.add_argument('--background-bins', nargs='+', diff --git a/bin/plotting/pycbc_plot_bank_corner b/bin/plotting/pycbc_plot_bank_corner index 6d55b2fbcab..52ef6eea29e 100644 --- a/bin/plotting/pycbc_plot_bank_corner +++ b/bin/plotting/pycbc_plot_bank_corner @@ -29,6 +29,8 @@ import logging from textwrap import wrap import pycbc +import pycbc.version +from pycbc import __version__ from pycbc.results.plot import (add_style_opt_to_parser, set_style_from_cli) from pycbc.io import FieldArray, HFile from pycbc.inference import option_utils @@ -50,6 +52,10 @@ parameter_options = conversion_options + _fit_parameters parser = argparse.ArgumentParser(usage='pycbc_plot_bank_corner [--options]', description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", + action="version", + version=__version__, + help="Prints version information.") parser.add_argument("--bank-file", required=True, help="The bank file to read in and plot") @@ -72,13 +78,10 @@ parser.add_argument("--parameters", "property of that parameter will be used. If not " "provided, will plot all of the parameters in the " "bank.") -parser.add_argument( - '--log-parameters', - nargs='+', +parser.add_argument("--log-parameters", + nargs="+", default=[], - help="Which parameters are to be plotted on a log scale? " - "Must be also given in parameters" -) + help="Plot these parameters on a log scale") parser.add_argument('--plot-histogram', action='store_true', help="Plot 1D histograms of parameters on the " @@ -110,11 +113,6 @@ parser.add_argument("--color-parameter", help="Color scatter points according to the parameter given. " "May optionally provide a label in the same way as for " "--parameters. 
Default=No scatter point coloring.") -parser.add_argument( - '--log-colormap', - action='store_true', - help="Should the colorbar be plotted on a log scale?" -) parser.add_argument('--dpi', type=int, default=200, @@ -129,13 +127,6 @@ parser.add_argument('--title', add_style_opt_to_parser(parser) args = parser.parse_args() -for lp in args.log_parameters: - if not lp in args.parameters: - parser.error( - "--log-parameters should be in --parameters. " - f"{lp} not in [{', '.join(args.parameters)}]" - ) - pycbc.init_logging(args.verbose) set_style_from_cli(args) @@ -165,7 +156,7 @@ if args.fits_file is not None: param = fits_f[p][:].astype(float) # We need to check for the cardinal '-1' value which means # that the fit is invalid - param[param <= 0] = np.nan + param[param <= 0] = 0 if 'count' in p and 'log' not in p else np.nan bank[p] = param logging.info("Got %d templates from the bank", banklen) @@ -246,21 +237,12 @@ if cpar: for p in required_minmax: minval = np.nanmin(bank_fa[p][bank_fa[p] != -np.inf]) maxval = np.nanmax(bank_fa[p][bank_fa[p] != np.inf]) - if (p in args.log_parameters) or (p == cpar and args.log_colormap): - # Extend the range by 10% in log-space - logvalrange = np.log(maxval) - np.log(minval) - if p not in mins: - mins[p] = np.exp(np.log(minval) - 0.05 * logvalrange) - if p not in maxs: - maxs[p] = np.exp(np.log(maxval) + 0.05 * logvalrange) - else: - # Extend the range by 10% - valrange = maxval - minval - if p not in mins: - mins[p] = minval - 0.05 * valrange - if p not in maxs: - maxs[p] = maxval + 0.05 * valrange + valrange = maxval - minval + if p not in mins: + mins[p] = minval - 0.05 * valrange + if p not in maxs: + maxs[p] = maxval + 0.05 * valrange # Deal with non-coloring case: zvals = bank_fa[cpar] if cpar else None @@ -275,7 +257,6 @@ fig, axis_dict = create_multidim_plot( plot_scatter=True, plot_contours=False, scatter_cmap="viridis", - scatter_log_cmap=args.log_colormap, marginal_title=False, marginal_percentiles=[], fill_color='g', @@ -287,7 +268,6 @@ fig, axis_dict = create_multidim_plot( hist_color=hist_color, mins=mins, maxs=maxs, - log_parameters=args.log_parameters, ) title_text = f"{os.path.basename(args.bank_file)}" @@ -323,19 +303,6 @@ for i in range(len(args.parameters)): for s0, s1 in zip(sharex_axes[:-1], sharex_axes[1:]): s0.sharex(s1) -for (p1, p2), ax in axis_dict.items(): - if p1 == p2 and p1 in args.log_parameters: - if p1 == args.parameters[-1] and len(args.parameters) == 2: - # This will be turned on its side, so set _y_ axis to log - ax[0].semilogy() - else: - ax[0].semilogx() - else: - if p1 in args.log_parameters: - ax[0].semilogx() - if p2 in args.log_parameters: - ax[0].semilogy() - logging.info("Plot generated") fig.set_dpi(args.dpi) diff --git a/bin/plotting/pycbc_plot_dq_flag_likelihood b/bin/plotting/pycbc_plot_dq_flag_likelihood index 2e8a483d4fd..5ac749dad78 100644 --- a/bin/plotting/pycbc_plot_dq_flag_likelihood +++ b/bin/plotting/pycbc_plot_dq_flag_likelihood @@ -9,11 +9,13 @@ from matplotlib import use as matplotlib_use from matplotlib import pyplot matplotlib_use('Agg') +from pycbc.version import git_verbose_msg as version import pycbc.results from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=version) parser.add_argument("--dq-file", required=True) parser.add_argument("--dq-label", required=True) parser.add_argument("--ifo", type=str, required=True) diff --git 
a/bin/plotting/pycbc_plot_dq_likelihood_vs_time b/bin/plotting/pycbc_plot_dq_likelihood_vs_time index aa0c52f4022..b4740bdebe8 100644 --- a/bin/plotting/pycbc_plot_dq_likelihood_vs_time +++ b/bin/plotting/pycbc_plot_dq_likelihood_vs_time @@ -10,11 +10,13 @@ from matplotlib import use use('Agg') from matplotlib import pyplot +from pycbc.version import git_verbose_msg as version import pycbc.results from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=version) parser.add_argument("--ifo", type=str, required=True) parser.add_argument("--dq-file", required=True) parser.add_argument('--background-bin', default='all_bin') diff --git a/bin/plotting/pycbc_plot_dq_percentiles b/bin/plotting/pycbc_plot_dq_percentiles index e8c6c29f40e..6e2915a46f1 100644 --- a/bin/plotting/pycbc_plot_dq_percentiles +++ b/bin/plotting/pycbc_plot_dq_percentiles @@ -10,11 +10,13 @@ from matplotlib import use use('Agg') from matplotlib import pyplot +from pycbc.version import git_verbose_msg as version import pycbc.results from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=version) parser.add_argument("--ifo", type=str,required=True) parser.add_argument("--dq-file", required=True) parser.add_argument('--background-bin', default='all_bin') diff --git a/bin/plotting/pycbc_plot_gating b/bin/plotting/pycbc_plot_gating index d75e877b4fc..573c6fb42f6 100644 --- a/bin/plotting/pycbc_plot_gating +++ b/bin/plotting/pycbc_plot_gating @@ -12,14 +12,16 @@ from matplotlib.patches import Rectangle import mpld3 import mpld3.plugins -import pycbc from pycbc.results.color import ifo_color from pycbc.results.mpld3_utils import MPLSlide +import pycbc.version from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--input-file', nargs='+', required=True, help='Single-detector inspiral HDF5 files to take gating ' 'data from.') diff --git a/bin/plotting/pycbc_plot_hist b/bin/plotting/pycbc_plot_hist index 04ae7eba99e..58899ddeed5 100644 --- a/bin/plotting/pycbc_plot_hist +++ b/bin/plotting/pycbc_plot_hist @@ -12,12 +12,14 @@ use('Agg') from matplotlib import pyplot import pycbc +import pycbc.version import pycbc.results import pycbc.io from pycbc.events import background_bin_from_string, veto, ranking parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) parser.add_argument('--trigger-file', required=True, help="Combined single detector hdf trigger file") parser.add_argument('--veto-file', diff --git a/bin/plotting/pycbc_plot_multiifo_dtphase b/bin/plotting/pycbc_plot_multiifo_dtphase index 90833f966be..923c643b2fc 100755 --- a/bin/plotting/pycbc_plot_multiifo_dtphase +++ b/bin/plotting/pycbc_plot_multiifo_dtphase @@ -26,7 +26,7 @@ matplotlib.use('agg') from matplotlib import pyplot as plt from pycbc.events import coinc_rate -from pycbc import init_logging, add_common_pycbc_options +from pycbc import init_logging, version, add_common_pycbc_options from pycbc.results import save_fig_with_metadata from pycbc.io.hdf import HFile @@ -43,6 +43,8 @@ def marginalise_pdf(pdf, 
dimensions_to_keep): parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument('--version', action="version", + version=version.git_verbose_msg) parser.add_argument('--input-file', required=True, help="Input phasetd histogram file, made using " "pycbc_multiifo_dtphase") diff --git a/bin/plotting/pycbc_plot_psd_file b/bin/plotting/pycbc_plot_psd_file index 9a612aa4576..127db15785d 100644 --- a/bin/plotting/pycbc_plot_psd_file +++ b/bin/plotting/pycbc_plot_psd_file @@ -11,11 +11,14 @@ import sys import pycbc import pycbc.results import pycbc.psd +import pycbc.version from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument("--psd-files", nargs='+', required=True, help='HDF file(s) containing the PSDs to plot') parser.add_argument('--hdf-group', default=None, diff --git a/bin/plotting/pycbc_plot_qscan b/bin/plotting/pycbc_plot_qscan index e8bfae143d0..ba4ba9b3cdf 100644 --- a/bin/plotting/pycbc_plot_qscan +++ b/bin/plotting/pycbc_plot_qscan @@ -34,6 +34,7 @@ from matplotlib import pyplot as plt from matplotlib.colors import LogNorm import pycbc.strain +import pycbc.version import pycbc.results # https://stackoverflow.com/questions/9978880/python-argument-parser-list-of-list-or-tuple-of-tuples @@ -46,6 +47,8 @@ def t_window(s): parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--output-file', required=True, help='Output plot') parser.add_argument('--center-time', type=float, help='Center plot on the given GPS time. 
If omitted, use ' diff --git a/bin/plotting/pycbc_plot_range b/bin/plotting/pycbc_plot_range index f107d202b62..5d90c484bb4 100644 --- a/bin/plotting/pycbc_plot_range +++ b/bin/plotting/pycbc_plot_range @@ -11,6 +11,7 @@ import sys import pycbc.results import pycbc.types +import pycbc.version import pycbc.waveform import pycbc.filter from pycbc.io.hdf import HFile @@ -20,6 +21,8 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument("--psd-files", nargs='+', help='HDF file of psds') parser.add_argument("--output-file", help='output file name') parser.add_argument("--mass1", nargs="+", type=float, diff --git a/bin/plotting/pycbc_plot_range_vs_mtot b/bin/plotting/pycbc_plot_range_vs_mtot index f03c7a90b72..a8333e94178 100644 --- a/bin/plotting/pycbc_plot_range_vs_mtot +++ b/bin/plotting/pycbc_plot_range_vs_mtot @@ -11,6 +11,7 @@ import math import pycbc.results import pycbc.types +import pycbc.version import pycbc.waveform import pycbc.filter from pycbc.io.hdf import HFile @@ -20,6 +21,8 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument("--psd-files", nargs='+', help='HDF file of psds') parser.add_argument("--output-file", help='output file name') diff --git a/bin/plotting/pycbc_plot_singles_timefreq b/bin/plotting/pycbc_plot_singles_timefreq index 69d90587765..a5222c90283 100644 --- a/bin/plotting/pycbc_plot_singles_timefreq +++ b/bin/plotting/pycbc_plot_singles_timefreq @@ -38,11 +38,14 @@ import pycbc.events import pycbc.pnutils import pycbc.strain import pycbc.results +import pycbc.version import pycbc.waveform parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--trig-file', required=True, help='HDF5 file containing single triggers') parser.add_argument('--output-file', required=True, help='Output plot') diff --git a/bin/plotting/pycbc_plot_singles_vs_params b/bin/plotting/pycbc_plot_singles_vs_params index 3654c2ab6a5..0fa5620b68c 100644 --- a/bin/plotting/pycbc_plot_singles_vs_params +++ b/bin/plotting/pycbc_plot_singles_vs_params @@ -36,10 +36,13 @@ import pycbc.pnutils import pycbc.events import pycbc.results import pycbc.io +import pycbc.version from pycbc.events import ranking parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--single-trig-file', required=True, help='Path to file containing single-detector triggers in ' 'HDF5 format. 
Required') diff --git a/bin/plotting/pycbc_plot_throughput b/bin/plotting/pycbc_plot_throughput index 252b09ef9bd..ead55dcff29 100755 --- a/bin/plotting/pycbc_plot_throughput +++ b/bin/plotting/pycbc_plot_throughput @@ -11,10 +11,13 @@ from scipy.stats import hmean import pycbc from pycbc.results.color import ifo_color +import pycbc.version from pycbc.io.hdf import HFile parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--input-file', nargs='+', required=True, help='Single-detector inspiral HDF5 files to get ' 'templates per core.') diff --git a/bin/plotting/pycbc_plot_trigrate b/bin/plotting/pycbc_plot_trigrate index a0c538f2bae..0e324b1ec81 100644 --- a/bin/plotting/pycbc_plot_trigrate +++ b/bin/plotting/pycbc_plot_trigrate @@ -27,6 +27,7 @@ import pycbc from pycbc import io, events, bin_utils, results from pycbc.events import triggers from pycbc.events import ranking +import pycbc.version #### DEFINITIONS AND FUNCTIONS #### @@ -40,6 +41,7 @@ def get_stat(statchoice, trigs): parser = argparse.ArgumentParser(usage="", description="Plot trigger rates") pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--trigger-file", help="Input hdf5 file containing single triggers. " "Required") diff --git a/bin/plotting/pycbc_plot_waveform b/bin/plotting/pycbc_plot_waveform index ff6aed5d507..9f94d1aac9d 100644 --- a/bin/plotting/pycbc_plot_waveform +++ b/bin/plotting/pycbc_plot_waveform @@ -25,6 +25,7 @@ from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes from mpl_toolkits.axes_grid1.inset_locator import mark_inset from pycbc import waveform, io +from pycbc import version from pycbc import results from pycbc import init_logging, add_common_pycbc_options from pycbc.fft import ifft @@ -33,6 +34,8 @@ from pycbc.types import TimeSeries, zeros, complex64 parser = argparse.ArgumentParser(usage='', description=__doc__) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=version.git_verbose_msg) parser.add_argument('--output-file', required=True) parser.add_argument("--low-frequency-cutoff", type=float, help="The low frequency cutoff to use for generation.") diff --git a/bin/pycbc_banksim b/bin/pycbc_banksim index 0af218957ec..a5d70dea773 100644 --- a/bin/pycbc_banksim +++ b/bin/pycbc_banksim @@ -35,7 +35,7 @@ from pycbc import DYN_RANGE_FAC from pycbc.types import FrequencySeries, TimeSeries, zeros, complex_same_precision_as from pycbc.filter import match, sigmasq from pycbc.io.ligolw import LIGOLWContentHandler -import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain +import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain, pycbc.version from pycbc.detector import overhead_antenna_pattern as generate_fplus_fcross from pycbc.waveform import TemplateBank @@ -147,6 +147,8 @@ parser = ArgumentParser(description=__doc__) parser.add_argument("--match-file", dest="out_file", metavar="FILE", required=True, help="File to output match results") pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) #Template Settings parser.add_argument("--template-file", dest="bank_file", metavar="FILE", diff --git a/bin/pycbc_banksim_combine_banks b/bin/pycbc_banksim_combine_banks index 3a547d71aa4..56f382f19ee 100644 --- a/bin/pycbc_banksim_combine_banks +++ 
b/bin/pycbc_banksim_combine_banks @@ -28,6 +28,7 @@ import logging from numpy import * import pycbc +import pycbc.version __author__ = "Ian Harry " __program__ = "pycbc_banksim_combine_banks" @@ -36,6 +37,7 @@ __program__ = "pycbc_banksim_combine_banks" _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) +parser.add_argument('--version', action=pycbc.version.Version) pycbc.add_common_pycbc_options(parser) parser.add_argument("-I", "--input-files", nargs='+', help="Explicit list of input files.") diff --git a/bin/pycbc_banksim_match_combine b/bin/pycbc_banksim_match_combine index 652fd0b8813..1d140c3b6bc 100644 --- a/bin/pycbc_banksim_match_combine +++ b/bin/pycbc_banksim_match_combine @@ -22,6 +22,7 @@ a set of injection files. The *order* of the injection files *must* match the bank files, and the number of injections in each must correspond one-to-one. """ +import imp import argparse import numpy as np @@ -32,7 +33,7 @@ from pycbc import pnutils from pycbc.waveform import TemplateBank from pycbc.io.ligolw import LIGOLWContentHandler from pycbc.io.hdf import HFile -from pycbc import load_source + __author__ = "Ian Harry " __version__ = pycbc.version.git_verbose_msg @@ -43,6 +44,7 @@ __program__ = "pycbc_banksim_match_combine" # Read command line options parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument("--version", action="version", version=__version__) pycbc.add_common_pycbc_options(parser) parser.add_argument("--match-files", nargs='+', help="Explicit list of match files.") @@ -136,7 +138,7 @@ for val in trig_par_list: f['trig_params/{}'.format(val)] = trig_params[val] if options.filter_func_file: - modl = load_source('filter_func', options.filter_func_file) + modl = imp.load_source('filter_func', options.filter_func_file) func = modl.filter_injections bool_arr = func(inj_params['mass1'], inj_params['mass2'], inj_params['spin1z'], inj_params['spin2z']) diff --git a/bin/pycbc_banksim_skymax b/bin/pycbc_banksim_skymax index f93fdfa006b..a32bf175cde 100644 --- a/bin/pycbc_banksim_skymax +++ b/bin/pycbc_banksim_skymax @@ -39,7 +39,7 @@ from pycbc.filter import overlap_cplx, matched_filter from pycbc.filter import compute_max_snr_over_sky_loc_stat from pycbc.filter import compute_max_snr_over_sky_loc_stat_no_phase from pycbc.io.ligolw import LIGOLWContentHandler -import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain +import pycbc.psd, pycbc.scheme, pycbc.fft, pycbc.strain, pycbc.version from pycbc.detector import overhead_antenna_pattern as generate_fplus_fcross from pycbc.waveform import TemplateBank @@ -153,6 +153,8 @@ parser = ArgumentParser(description=__doc__) parser.add_argument("--match-file", dest="out_file", metavar="FILE", required=True, help="File to output match results") pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) #Template Settings parser.add_argument("--template-file", dest="bank_file", metavar="FILE", diff --git a/bin/pycbc_coinc_time b/bin/pycbc_coinc_time new file mode 100644 index 00000000000..34e4687ea63 --- /dev/null +++ b/bin/pycbc_coinc_time @@ -0,0 +1,187 @@ +#!/bin/env python +import argparse +import logging +import numpy +from dqsegdb.apicalls import dqsegdbQueryTimes as query + +import ligo.segments + +#from pycbc.workflow.segment import cat_to_veto_def_cat as convert_cat +import pycbc.version + +def sane(seg_list): + """ Convert list of len two lists containing strs to segment list """ + segs = ligo.segments.segmentlist([]) + for 
seg in seg_list: + segs.append(ligo.segments.segment(int(seg[0]), int(seg[1]))) + return segs + +def parse_veto_definer(veto_def_filename): + """ Parse a veto definer file from the filename and return a dictionary + indexed by ifo and veto definer category level. + + Parameters + ---------- + veto_def_filename: str + The path to the veto definer file + + Returns: + parsed_definition: dict + Returns a dictionary first indexed by ifo, then category level, and + finally a list of veto definitions. + """ + from ligo.lw import table, utils as ligolw_utils + from pycbc.io.ligolw import LIGOLWContentHandler as h + + indoc = ligolw_utils.load_filename(veto_def_filename, False, contenthandler=h) + veto_table = table.Table.get_table(indoc, 'veto_definer') + + ifo = veto_table.getColumnByName('ifo') + name = veto_table.getColumnByName('name') + version = numpy.array(veto_table.getColumnByName('version')) + category = numpy.array(veto_table.getColumnByName('category')) + start = numpy.array(veto_table.getColumnByName('start_time')) + end = numpy.array(veto_table.getColumnByName('end_time')) + start_pad = numpy.array(veto_table.getColumnByName('start_pad')) + end_pad = numpy.array(veto_table.getColumnByName('end_pad')) + + data = {} + for i in range(len(veto_table)): + if ifo[i] not in data: + data[ifo[i]] = {} + + if category[i] not in data[ifo[i]]: + data[ifo[i]][category[i]] = [] + + veto_info = {'name': name[i], + 'version': version[i], + 'start': start[i], + 'end': end[i], + 'start_pad': start_pad[i], + 'end_pad': end_pad[i], + } + data[ifo[i]][category[i]].append(veto_info) + return data + +def get_vetoes(veto_def, ifo, server, veto_name, start_default, end_default): + """ Cycle through the veto name string and collect the vetoes for the + selected categories. Return the final segment list + """ + raise ValueError("This code needs updating to work with the new segment " + "interface. If it's still used please fix this, " + "otherwise we can just remove this code.") + veto_segments = ligo.segments.segmentlist([]) + for cat in veto_name: + #cat = convert_cat(cat) + flags = veto_def[ifo][cat] + + for flag in flags: + start = flag['start'] if flag['start'] >= start_default else start_default + end = flag['end'] if flag['end'] !=0 else end_default + + raw_segs = sane(query("https", server, ifo, + flag['name'], flag['version'], + 'active', start, end)[0]['active']) + + for rseg in raw_segs: + s, e = rseg[0] + flag['start_pad'], rseg[1] + flag['end_pad'] + veto_segments.append(ligo.segments.segment(s, e)) + return veto_segments.coalesce() + + +parser = argparse.ArgumentParser() +parser.add_argument('--version', action='version', version=pycbc.version.git_verbose_msg) +pycbc.add_common_pycbc_options(parser) + +parser.add_argument('--gps-start-time', type=int, required=True, + help="integer gps start time") +parser.add_argument('--gps-end-time', type=int, required=True, + help="integer gps end time") +parser.add_argument('--veto-definer', type=str, required=True, + help="path to veto definer xml file") +parser.add_argument('--science-veto-levels', type=str, + help="Veto levels to apply by removing strain data before analysis ex. '1' for CAT1 veto") +parser.add_argument('--trigger-veto-levels', type=str, + help="Veto levels to apply by removing triggers from analyzed times ex. 
'12H' for CAT 1 and CAT2 vetoes plus hardware injections") +parser.add_argument('--segment-server', type=str, + help="segment server string") +parser.add_argument('--science-names', nargs=2, + help="name of the segment flag IFO:NAME:VERSION to use for science") + +group = parser.add_argument_group("pycbc_inspiral options that determine padding and minimum time analyzable.") +group.add_argument('--segment-length', type=int) +group.add_argument('--min-analysis-segments', type=int) +group.add_argument('--pad-data', type=int) +group.add_argument('--segment-start-pad', type=int) +group.add_argument('--segment-end-pad', type=int) + +args = parser.parse_args() + + +analysis_start_pad = args.segment_start_pad + args.pad_data +analysis_end_pad = args.segment_end_pad + args.pad_data +minimum_segment_length = ((args.segment_length - args.segment_start_pad + - args.segment_end_pad) * args.min_analysis_segments + + analysis_start_pad + analysis_end_pad) + +pycbc.init_logging(args.verbose) + +ifo_segs = [] + +veto_def = parse_veto_definer(args.veto_definer) + +# Read in the science segments for the requested time +for science_name in args.science_names: + ifo, name, version = science_name.split(':') + + logging.info("For IFO: %s, querying science time (%s, %s)" % (ifo, name, version)) + segments = sane(query("https", args.segment_server, ifo, name, version, + 'active', args.gps_start_time, args.gps_end_time)[0]['active']) + + #trim segments to the request time + request = ligo.segments.segment(args.gps_start_time, args.gps_end_time) + segments = (ligo.segments.segmentlist([request]) & segments) + + # apply cat 1 vetoes here + logging.info('Found %ss of data' % abs(segments)) + segments = segments.coalesce() + + cat1_segs = get_vetoes(veto_def, ifo, + args.segment_server, + args.science_veto_levels, + args.gps_start_time, + args.gps_end_time, + ).coalesce() + + segments -= cat1_segs + logging.info('Found %ss after applying CAT1 vetoes' % abs(segments)) + # remove short segments, and account for filter padding + logging.info('Removing segments shorter than %ss' % minimum_segment_length) + lsegments = ligo.segments.segmentlist([]) + segments = segments.coalesce() + for seg in segments: + if abs(seg) >= minimum_segment_length: + start = seg[0] + analysis_start_pad + end = seg[1] - analysis_end_pad + lsegments.append(ligo.segments.segment(start, end)) + segments = lsegments + logging.info('Found %ss after applying removing padding / short segments' % abs(segments)) + + # apply vetoes that remove triggers here + segments = segments.coalesce() + vtrig_segs = get_vetoes(veto_def, ifo, + args.segment_server, + args.trigger_veto_levels, + args.gps_start_time, + args.gps_end_time, + ).coalesce() + segments -= vtrig_segs + + logging.info('Found %ss after applying trigger vetoes' % abs(segments)) + segments.coalesce() + + ifo_segs += [segments] + +coinc_time = abs(ifo_segs[0] & ifo_segs[1]) +print("Available Coincident Time from %s-%s" % (args.gps_start_time, args.gps_end_time)) +print("%s seconds, %5.5f days" % (coinc_time, coinc_time / 86400.0)) diff --git a/bin/pycbc_condition_strain b/bin/pycbc_condition_strain index 4f65cc717a3..e9ca518c113 100644 --- a/bin/pycbc_condition_strain +++ b/bin/pycbc_condition_strain @@ -28,6 +28,7 @@ import logging import argparse import pycbc.strain +import pycbc.version import pycbc.frame import pycbc.fft from pycbc.types import float32, float64 @@ -46,6 +47,8 @@ def write_strain(file_name, channel, data): parser = argparse.ArgumentParser(description=__doc__) 
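[Editor's note: the new pycbc_coinc_time script above reduces to segment arithmetic: intersect the per-ifo segment lists and sum the result, as in its final `abs(ifo_segs[0] & ifo_segs[1])`. A toy sketch using ligo.segments with made-up GPS intervals; the real script builds its lists from dqsegdb queries and subtracts CAT1 and trigger-level vetoes first:

import ligo.segments as segments

h1 = segments.segmentlist([segments.segment(100, 200),
                           segments.segment(300, 500)])
l1 = segments.segmentlist([segments.segment(150, 400)])

coinc = (h1 & l1).coalesce()                  # [150,200) and [300,400)
print("coincident segments:", coinc)
print("coincident time: %d s" % abs(coinc))   # abs() sums the durations

End note.]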
pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument('--output-strain-file', required=True, help='Name of output frame file. The file format is ' 'selected based on the extension (.gwf, .npy, .hdf ' diff --git a/bin/pycbc_convertinjfiletohdf b/bin/pycbc_convertinjfiletohdf index 5b983673ade..57ccac2b6b5 100755 --- a/bin/pycbc_convertinjfiletohdf +++ b/bin/pycbc_convertinjfiletohdf @@ -161,6 +161,8 @@ class LVKNewStyleInjectionSet(object): parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) parser.add_argument('--injection-file', required=True, help="The injection file to load. Must end in '.xml[.gz]' " "and must contain a SimInspiral table") diff --git a/bin/pycbc_create_injections b/bin/pycbc_create_injections index 6b388a34ad1..3fac44c3d66 100644 --- a/bin/pycbc_create_injections +++ b/bin/pycbc_create_injections @@ -117,6 +117,7 @@ import h5py from numpy.random import uniform import pycbc +import pycbc.version from pycbc.inject import InjectionSet from pycbc import distributions from pycbc import transforms @@ -130,6 +131,9 @@ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) configuration.add_workflow_command_line_group(parser) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg, + help='Prints version information.') parser.add_argument('--ninjections', type=int, help='Number of injections to create.') parser.add_argument('--gps-start-time', type=int, help="Alternative to " diff --git a/bin/pycbc_data_store b/bin/pycbc_data_store index 087b18fb87d..9fdee6b3eb6 100755 --- a/bin/pycbc_data_store +++ b/bin/pycbc_data_store @@ -8,6 +8,7 @@ import numpy import pycbc import pycbc.strain import pycbc.dq +from pycbc.version import git_verbose_msg as version from pycbc.fft.fftw import set_measure_level from pycbc.events.veto import segments_to_start_end from pycbc.io.hdf import HFile @@ -16,6 +17,7 @@ set_measure_level(0) parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=version) parser.add_argument("--science-name", help="Science flag definition") parser.add_argument("--segment-server") parser.add_argument("--veto-definer-file") diff --git a/bin/pycbc_faithsim b/bin/pycbc_faithsim index 694b818ac46..e1bf979a15a 100644 --- a/bin/pycbc_faithsim +++ b/bin/pycbc_faithsim @@ -31,6 +31,7 @@ import sys from ligo.lw import utils as ligolw_utils from ligo.lw import lsctables +import pycbc.version import pycbc.strain import pycbc.psd from pycbc.waveform import td_approximants, fd_approximants @@ -82,6 +83,8 @@ psd_names = pycbc.psd.get_lalsim_psd_list() taper_choices = ["start","end","startend"] parser = argparse.ArgumentParser(usage='', description="Calculate faithfulness for a set of waveforms.") +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument("--param-file", dest="bank_file", metavar="FILE", help="Sngl or Sim Inspiral Table containing waveform " diff --git a/bin/pycbc_fit_sngl_trigs b/bin/pycbc_fit_sngl_trigs index 7acde3384d3..c449e41321f 100644 --- a/bin/pycbc_fit_sngl_trigs +++ b/bin/pycbc_fit_sngl_trigs @@ -20,10 +20,10 @@ use('Agg') from matplotlib 
import pyplot as plt import numpy as np -import pycbc from pycbc import io, events, bin_utils from pycbc.events import ranking from pycbc.events import trigger_fits as trstats +import pycbc.version #### DEFINITIONS AND FUNCTIONS #### @@ -56,6 +56,7 @@ parser = argparse.ArgumentParser(usage="", description="Perform maximum-likelihood fits of single inspiral trigger" "distributions to various functions") pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument("--inputs", nargs="+", help="Input file or space-separated list of input files " "containing single triggers. Currently .xml(.gz) " diff --git a/bin/pycbc_hdf5_splitbank b/bin/pycbc_hdf5_splitbank index ad50d57aed9..d080d5774bb 100755 --- a/bin/pycbc_hdf5_splitbank +++ b/bin/pycbc_hdf5_splitbank @@ -27,13 +27,15 @@ import h5py import logging from numpy import random -import pycbc +import pycbc, pycbc.version from pycbc.waveform import bank __author__ = "Soumi De " parser = argparse.ArgumentParser(description=__doc__[1:]) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument("--bank-file", type=str, help="Bank hdf file to load.") outbanks = parser.add_mutually_exclusive_group(required=True) diff --git a/bin/pycbc_hdf_splitinj b/bin/pycbc_hdf_splitinj index 288ecee39a6..1026a6f0fe9 100644 --- a/bin/pycbc_hdf_splitinj +++ b/bin/pycbc_hdf_splitinj @@ -8,14 +8,16 @@ Split sets are organized to maximize time between injections. import argparse import numpy as np -import pycbc from pycbc.inject import InjectionSet +import pycbc.version from pycbc.io.hdf import HFile # Parse command line parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) parser.add_argument("-f", "--output-files", nargs='*', required=True, help="Names of output files") parser.add_argument("-i", "--input-file", required=True, diff --git a/bin/pycbc_inj_cut b/bin/pycbc_inj_cut index bdcf07d994f..1ca70bcb5ca 100644 --- a/bin/pycbc_inj_cut +++ b/bin/pycbc_inj_cut @@ -36,9 +36,11 @@ from ligo.lw import lsctables import pycbc import pycbc.inject from pycbc.types import MultiDetOptionAction +import pycbc.version parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action=pycbc.version.Version) parser.add_argument('--input', dest='inj_xml', required=True, help='Input LIGOLW injections file.') parser.add_argument('--output-missed', dest='output_missed', required=False, help="Output LIGOLW file containing injections we expect to miss.") diff --git a/bin/pycbc_inspiral b/bin/pycbc_inspiral index 61e769ac2e7..c7ea379cfe7 100644 --- a/bin/pycbc_inspiral +++ b/bin/pycbc_inspiral @@ -27,6 +27,7 @@ import time from multiprocessing import Pool import pycbc +import pycbc.version from pycbc import vetoes, psd, waveform, strain, scheme, fft, DYN_RANGE_FAC, events from pycbc.vetoes.sgchisq import SingleDetSGChisq from pycbc.filter import MatchedFilterControl, make_frequency_series, qtransform @@ -53,6 +54,7 @@ parser = argparse.ArgumentParser(usage='', description="Find single detector gravitational-wave triggers.") pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action=pycbc.version.Version) parser.add_argument("--update-progress", help="updates a file 'progress.txt' with a value 0 .. 
1.0 when this amount of (filtering) progress was made", type=float, default=0) diff --git a/bin/pycbc_live b/bin/pycbc_live index 52a0de2433a..2595adf44b4 100755 --- a/bin/pycbc_live +++ b/bin/pycbc_live @@ -330,7 +330,7 @@ class LiveEventManager(object): * self.bank.sample_rate) flen = int(tlen / 2 + 1) delta_f = self.bank.sample_rate / float(tlen) - cmd = f'timeout {args.snr_opt_timeout} ' + cmd = 'timeout {} '.format(args.snr_opt_timeout) exepath = which('pycbc_optimize_snr') cmd += exepath + ' ' @@ -342,16 +342,17 @@ class LiveEventManager(object): data_fils_str = '--data-files ' psd_fils_str = '--psd-files ' for ifo in live_ifos: - curr_fname = fname.replace( - '.xml.gz', f'_{ifo}_data_overwhitened.hdf' - ) + curr_fname = \ + fname.replace('.xml.gz', + '_{}_data_overwhitened.hdf'.format(ifo)) curr_data = self.data_readers[ifo].overwhitened_data(delta_f) curr_data.save(curr_fname) - data_fils_str += f'{ifo}:{curr_fname} ' - curr_fname = fname.replace('.xml.gz', f'_{ifo}_psd.hdf') + data_fils_str += '{}:{} ' .format(ifo, curr_fname) + curr_fname = fname.replace('.xml.gz', + '_{}_psd.hdf'.format(ifo)) curr_psd = curr_data.psd curr_psd.save(curr_fname) - psd_fils_str += f'{ifo}:{curr_fname} ' + psd_fils_str += '{}:{} ' .format(ifo, curr_fname) cmd += data_fils_str cmd += psd_fils_str @@ -385,10 +386,10 @@ class LiveEventManager(object): 'mc_area_args/', self.mc_area_args) - cmd += f'--params-file {curr_fname} ' - cmd += f'--approximant {apr} ' - cmd += f'--gracedb-server {self.gracedb_server} ' - cmd += f'--gracedb-search {self.gracedb_search} ' + cmd += '--params-file {} '.format(curr_fname) + cmd += '--approximant {} '.format(apr) + cmd += '--gracedb-server {} '.format(self.gracedb_server) + cmd += '--gracedb-search {} '.format(self.gracedb_search) labels = self.snr_opt_label labels += ' '.join(self.gracedb_labels or []) @@ -407,7 +408,7 @@ class LiveEventManager(object): cmd += '--enable-gracedb-upload ' if self.fu_cores: - cmd += f'--cores {self.fu_cores} ' + cmd += '--cores {} '.format(self.fu_cores) if args.processing_scheme: # we will use the cores for multiple workers of the @@ -419,11 +420,11 @@ class LiveEventManager(object): # unlikely to benefit from a processing scheme with more # than 1 thread anyway. 
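[Editor's note: the pycbc_live hunks in this revert mechanically swap f-strings back to str.format(), matching the pre-merge code. The two forms are equivalent, as this small self-contained check shows:

ifo = 'H1'
fname = 'event.xml.gz'

# Post-revert style, as used throughout the pycbc_live hunks:
old_style = fname.replace('.xml.gz', '_{}_psd.hdf'.format(ifo))

# The f-string form removed by the revert produces the same result:
assert old_style == fname.replace('.xml.gz', f'_{ifo}_psd.hdf')
print(old_style)  # event_H1_psd.hdf

End note.]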
opt_scheme = args.processing_scheme.split(':')[0] - cmd += f'--processing-scheme {opt_scheme}:1 ' + cmd += '--processing-scheme {}:1 '.format(opt_scheme) # Save the command which would be used: snroc_fname = os.path.join(out_dir_path, 'snr_optimize_command.txt') - with open(snroc_fname, 'w') as snroc_file: + with open(snroc_fname,'w') as snroc_file: snroc_file.write(cmd) return cmd, out_dir_path @@ -489,10 +490,10 @@ class LiveEventManager(object): if optimize_snr_checks: logging.info('Optimizing SNR for event above threshold ..') self.run_optimize_snr( - cmd, - out_dir_path, - fname.replace('.xml.gz', '_attributes.hdf'), - gid + cmd, + out_dir_path, + fname.replace('.xml.gz', '_attributes.hdf'), + gid ) def check_coincs(self, ifos, coinc_results, psds): @@ -571,7 +572,8 @@ class LiveEventManager(object): # prevent singles being uploaded as well for coinc events self.last_few_coincs_uploaded.append(event.merger_time) # Only need to keep a few (10) events - self.last_few_coincs_uploaded = self.last_few_coincs_uploaded[-10:] + self.last_few_coincs_uploaded = \ + self.last_few_coincs_uploaded[-10:] # Save the event if not upload_checks: @@ -771,7 +773,8 @@ class LiveEventManager(object): for ifo in results: for k in results[ifo]: - f[f'{ifo}/{k}'] = h5py_unicode_workaround(results[ifo][k]) + f['%s/%s' % (ifo, k)] = \ + h5py_unicode_workaround(results[ifo][k]) for key in raw_results: f[key] = h5py_unicode_workaround(raw_results[key]) @@ -794,11 +797,12 @@ class LiveEventManager(object): gate_dtype = [('center_time', float), ('zero_half_width', float), ('taper_width', float)] - f[f'{ifo}/gates'] = numpy.array(gates[ifo], dtype=gate_dtype) + f['{}/gates'.format(ifo)] = \ + numpy.array(gates[ifo], dtype=gate_dtype) for ifo in (store_psd or {}): if store_psd[ifo] is not None: - store_psd[ifo].save(fname, group=f'{ifo}/psd') + store_psd[ifo].save(fname, group='%s/psd' % ifo) def check_max_length(args, waveforms): @@ -817,7 +821,7 @@ def check_max_length(args, waveforms): parser = argparse.ArgumentParser(description=__doc__) pycbc.waveform.bank.add_approximant_arg(parser) -parser.add_argument('--verbose', action='count') +parser.add_argument('--verbose', action='store_true') parser.add_argument('--version', action='version', version=version.git_verbose_msg) parser.add_argument('--bank-file', required=True, help="Template bank file in XML or HDF format") @@ -848,8 +852,6 @@ parser.add_argument('--idq-state-channel', action=MultiDetMultiColonOptionAction parser.add_argument('--idq-threshold', type=float, help='Threshold used to veto triggers at times of ' 'low iDQ False Alarm Probability') -parser.add_argument('--idq-reweighting', action='store_true',default=False, - help='Reweight triggers based on iDQ False Alarm Probability') parser.add_argument('--data-quality-channel', action=MultiDetMultiColonOptionAction, help="Channel containing data quality information. Used " @@ -1046,9 +1048,6 @@ parser.add_argument('--psd-variation', action='store_true', "values for each single detector triggers found by " "the search. Required when using a single detector " "ranking statistic that includes psd variation.") -parser.add_argument("--statistic-refresh-rate", type=float, - help="How often to refresh the statistic object, " - "in seconds. 
If omitted, no refreshing is done.") scheme.insert_processing_option_group(parser) LiveSingle.insert_args(parser) @@ -1070,14 +1069,9 @@ if not args.enable_gracedb_upload and args.enable_single_detector_upload: parser.error('You are not allowed to enable single ifo upload without the ' '--enable-gracedb-upload option!') -# Configure the log messages so that they are prefixed by the timestamp, the -# hostname of the originating node and the MPI rank of the originating process -pycbc.init_logging( - args.verbose, - format='%(asctime)s {} {} %(message)s'.format( - platform.node(), mpi.COMM_WORLD.Get_rank() - ) -) +log_format = '%(asctime)s {} {} %(message)s'.format(platform.node(), + mpi.COMM_WORLD.Get_rank()) +pycbc.init_logging(args.verbose, format=log_format) ctx = scheme.from_cli(args) fft.from_cli(args) @@ -1087,13 +1081,8 @@ valid_pad = args.analysis_chunk total_pad = args.trim_padding * 2 + valid_pad lfc = None if args.enable_bank_start_frequency else args.low_frequency_cutoff bank = waveform.LiveFilterBank( - args.bank_file, - args.sample_rate, - total_pad, - low_frequency_cutoff=lfc, - approximant=args.approximant, - increment=args.increment -) + args.bank_file, args.sample_rate, total_pad, low_frequency_cutoff=lfc, + approximant=args.approximant, increment=args.increment) if bank.min_f_lower < args.low_frequency_cutoff: parser.error('--low-frequency-cutoff ({} Hz) must not be larger than the ' 'minimum f_lower across all templates ' @@ -1146,16 +1135,12 @@ with ctx: bank.table.sort(order='mchirp') waveforms = list(bank[evnt.rank-1::evnt.size-1]) check_max_length(args, waveforms) - mf = LiveBatchMatchedFilter( - waveforms, - args.snr_threshold, - args.chisq_bins, - sg_chisq, - snr_abort_threshold=args.snr_abort_threshold, - newsnr_threshold=args.newsnr_threshold, - max_triggers_in_batch=args.max_triggers_in_batch, - maxelements=args.max_batch_size - ) + mf = LiveBatchMatchedFilter(waveforms, args.snr_threshold, + args.chisq_bins, sg_chisq, + snr_abort_threshold=args.snr_abort_threshold, + newsnr_threshold=args.newsnr_threshold, + max_triggers_in_batch=args.max_triggers_in_batch, + maxelements=args.max_batch_size) # Synchronize start time if not provided on the command line if not args.start_time: @@ -1181,8 +1166,6 @@ with ctx: if analyze_singles and evnt.rank == 0: sngl_estimator = {ifo: LiveSingle.from_cli(args, ifo) for ifo in evnt.trigg_ifos} - for estim in sngl_estimator.values(): - estim.start_refresh_thread() # Create double coincident background estimator # for every pair of triggering interferometers @@ -1200,13 +1183,8 @@ with ctx: global my_coinc_id my_coinc_id = i c = estimators[my_coinc_id] - setproctitle( - 'PyCBC Live {} bg estimator'.format(ppdets(c.ifos, '-')) - ) - - def estimator_refresh_threads(_): - c = estimators[my_coinc_id] - c.start_refresh_thread() + setproctitle('PyCBC Live {} bg estimator'.format( + ppdets(c.ifos, '-'))) def get_coinc(results): c = estimators[my_coinc_id] @@ -1222,7 +1200,6 @@ with ctx: coinc_pool = BroadcastPool(len(estimators)) coinc_pool.allmap(set_coinc_id, range(len(estimators))) - coinc_pool.broadcast(estimator_refresh_threads, None) logging.info('Starting') @@ -1313,35 +1290,17 @@ with ctx: if len(results[ifo][key]): results[ifo][key] = results[ifo][key][idx] if data_reader[ifo].idq is not None: - logging.info("Reading %s's iDQ information", ifo) + logging.info("Checking %s's iDQ information", ifo) start = data_reader[ifo].start_time times = results[ifo]['end_time'] - flag_active = data_reader[ifo].idq.flag_at_times( + idx = 
data_reader[ifo].idq.indices_of_flag( start, valid_pad, times, - padding=data_reader[ifo].dq_padding - ) - - if args.idq_reweighting: - logging.info( - 'iDQ flagged %d/%d %s triggers', - numpy.sum(flag_active), - len(times), - ifo - ) - results[ifo]['dq_state'] = flag_active.astype(int) - else: - # use idq as a veto - keep = numpy.logical_not(flag_active) - logging.info( - 'Keeping %d/%d %s triggers after iDQ', - numpy.sum(keep), - len(times), - ifo - ) - for key in results[ifo]: - if len(results[ifo][key]): - results[ifo][key] = \ - results[ifo][key][keep] + padding=data_reader[ifo].dq_padding) + logging.info('Keeping %d/%d %s triggers after iDQ', + len(idx), len(times), ifo) + for key in results[ifo]: + if len(results[ifo][key]): + results[ifo][key] = results[ifo][key][idx] # Calculate and add the psd variation for the results if args.psd_variation: @@ -1469,5 +1428,3 @@ if evnt.rank == 1: if args.enable_profiling is not None and evnt.rank == args.enable_profiling: pr.dump_stats(f'profiling_rank_{evnt.rank:03d}') - -logging.info("Exiting as the end time has been reached") diff --git a/bin/pycbc_make_html_page b/bin/pycbc_make_html_page index 4b6d9c9e893..f4ea0f2e809 100644 --- a/bin/pycbc_make_html_page +++ b/bin/pycbc_make_html_page @@ -30,6 +30,7 @@ from ligo import segments import pycbc.results from pycbc.results.render import get_embedded_config, render_workflow_html_template, setup_template_render from pycbc.workflow import segment +import pycbc.version def examine_dir(cwd): """ @@ -166,6 +167,8 @@ default_logo_location = "https://raw.githubusercontent.com/gwastro/" + \ parser = argparse.ArgumentParser(usage='pycbc_make_html_page \ [--options]', description="Create static html pages of a filesystem's content.") +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument('-f', '--template-file', type=str, help='Template file to use for skeleton html page.') diff --git a/bin/pycbc_make_skymap b/bin/pycbc_make_skymap index ad62ce6fb71..cd522665ce5 100755 --- a/bin/pycbc_make_skymap +++ b/bin/pycbc_make_skymap @@ -481,6 +481,7 @@ if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) + parser.add_argument('--version', action=pycbc.version.Version) # note that I am not using a MultiDetOptionAction for --trig-time as I # explicitly want to handle cases like `--trig-time 1234` and # `--trig-time H1:1234 L1:1234` in different ways diff --git a/bin/pycbc_merge_inj_hdf b/bin/pycbc_merge_inj_hdf index b09b0dce1a2..9b208738d47 100755 --- a/bin/pycbc_merge_inj_hdf +++ b/bin/pycbc_merge_inj_hdf @@ -27,6 +27,7 @@ import h5py import pycbc import pycbc.inject +import pycbc.version def get_gc_end_time(injection): @@ -44,6 +45,7 @@ def get_gc_end_time(injection): if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) + parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument('--injection-files', '-i', dest='injection_file', required=True, nargs='+', help='Input HDF5 files defining injections') diff --git a/bin/pycbc_multi_inspiral b/bin/pycbc_multi_inspiral index 114ed7fa88c..6737d99736c 100755 --- a/bin/pycbc_multi_inspiral +++ b/bin/pycbc_multi_inspiral @@ -30,6 +30,7 @@ import time import argparse import numpy as np +import pycbc.version from pycbc import ( detector, fft, @@ -119,6 +120,7 @@ def slide_limiter(args): # pycbc_multi_inspiral executable. 
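Several hunks above restore a --version flag via parser.add_argument('--version', action=pycbc.version.Version), i.e. a custom argparse action rather than the built-in action='version'. A minimal sketch of that pattern, assuming a placeholder version string rather than the real PyCBC output:

    import argparse
    import sys

    class Version(argparse.Action):
        """Zero-argument action: print version information and exit."""
        def __init__(self, nargs=0, **kw):
            super().__init__(nargs=nargs, **kw)

        def __call__(self, parser, namespace, values, option_string=None):
            print('PyCBC version: <placeholder>')
            sys.exit(0)

    parser = argparse.ArgumentParser()
    parser.add_argument('--version', action=Version)

Invoking the script with --version then prints the message and exits before any other option is processed.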
time_init = time.time() parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('--version', action=pycbc.version.Version) add_common_pycbc_options(parser) parser.add_argument("--output", type=str) parser.add_argument( diff --git a/bin/pycbc_optimal_snr b/bin/pycbc_optimal_snr index 08fa83c4ae9..ff10f857bb7 100644 --- a/bin/pycbc_optimal_snr +++ b/bin/pycbc_optimal_snr @@ -33,6 +33,7 @@ from ligo.lw import lsctables import pycbc import pycbc.inject import pycbc.psd +import pycbc.version from pycbc.filter import sigma, make_frequency_series from pycbc.types import TimeSeries, FrequencySeries, zeros, float32, \ MultiDetOptionAction, load_frequencyseries @@ -119,6 +120,7 @@ def get_gc_end_time(injection): if __name__ == '__main__': parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) + parser.add_argument("--version", action=pycbc.version.Version) parser.add_argument('--input-file', '-i', dest='injection_file', required=True, help='Input LIGOLW file defining injections') diff --git a/bin/pycbc_optimize_snr b/bin/pycbc_optimize_snr index 6ce3abb9ba0..11c9bcb91e7 100755 --- a/bin/pycbc_optimize_snr +++ b/bin/pycbc_optimize_snr @@ -14,7 +14,7 @@ mpl_use_backend('agg') import pycbc from pycbc import ( - fft, scheme + fft, scheme, version ) from pycbc.types import MultiDetOptionAction, load_frequencyseries import pycbc.conversions as cv @@ -27,6 +27,8 @@ from pycbc.live import snr_optimizer parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=version.git_verbose_msg) parser.add_argument('--params-file', required=True, help='Location of the attributes file created by PyCBC ' 'Live') diff --git a/bin/pycbc_single_template b/bin/pycbc_single_template index 5aaff2787f4..42a21dc6909 100755 --- a/bin/pycbc_single_template +++ b/bin/pycbc_single_template @@ -31,6 +31,7 @@ from pycbc.types import zeros, complex64 from pycbc.types import complex_same_precision_as from pycbc.detector import Detector import pycbc.waveform.utils +import pycbc.version def subtract_template(stilde, template, snr, trigger_time, flow): idx = int((trigger_time - snr.start_time) / snr.delta_t) @@ -91,6 +92,7 @@ def select_segments(fname, anal_name, data_name, ifo, time, pad_data): parser = argparse.ArgumentParser(usage='', description="Single template gravitational-wave followup") pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action=pycbc.version.Version) parser.add_argument('--output-file', required=True) parser.add_argument('--subtract-template', action='store_true') parser.add_argument("--low-frequency-cutoff", type=float, diff --git a/bin/pycbc_source_probability_offline b/bin/pycbc_source_probability_offline index 3122d4583f3..f12447603ef 100755 --- a/bin/pycbc_source_probability_offline +++ b/bin/pycbc_source_probability_offline @@ -29,6 +29,8 @@ parser.add_argument('--ifar-threshold', type=float, default=None, 'above threshold.') parser.add_argument('--include-mass-gap', action='store_true', help='Option to include the Mass Gap region.') +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) mchirp_area.insert_args(parser) args = parser.parse_args() diff --git a/bin/pycbc_split_inspinj b/bin/pycbc_split_inspinj index 7cfaef6143b..28e5ee9c1ad 100644 --- a/bin/pycbc_split_inspinj +++ b/bin/pycbc_split_inspinj @@ -6,12 +6,15 @@ from ligo.lw import utils as ligolw_utils from ligo.lw import lsctables from 
itertools import cycle -import pycbc +import pycbc.version from pycbc.io.ligolw import LIGOLWContentHandler, get_table_columns + # Parse command line parser = argparse.ArgumentParser() pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", + version=pycbc.version.git_verbose_msg) group = parser.add_mutually_exclusive_group(required=True) group.add_argument("-n", "--num-splits", type=int, help="Number of files to be generated") diff --git a/bin/pycbc_splitbank b/bin/pycbc_splitbank index ab4f59e6276..8919c31ac2f 100644 --- a/bin/pycbc_splitbank +++ b/bin/pycbc_splitbank @@ -35,6 +35,7 @@ from ligo.lw import lsctables from ligo.lw import utils as ligolw_utils import pycbc +from pycbc import version from pycbc.io.ligolw import LIGOLWContentHandler, create_process_table from pycbc.conversions import mchirp_from_mass1_mass2 from pycbc.pnutils import frequency_cutoff_from_name @@ -46,6 +47,8 @@ __program__ = "pycbc_splitbank" # Command line parsing parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('--version', action='version', version=version.git_verbose_msg) + pycbc.add_common_pycbc_options(parser) group = parser.add_mutually_exclusive_group(required=True) group.add_argument('--templates-per-bank', metavar='SAMPLES', diff --git a/bin/pygrb/pycbc_grb_inj_finder b/bin/pygrb/pycbc_grb_inj_finder index 05f6eebef51..0eab460e463 100644 --- a/bin/pygrb/pycbc_grb_inj_finder +++ b/bin/pygrb/pycbc_grb_inj_finder @@ -39,7 +39,7 @@ from gwdatafind.utils import filename_metadata from ligo.segments import segmentlist from ligo.segments.utils import fromsegwizard -from pycbc import add_common_pycbc_options, init_logging +from pycbc import __version__, add_common_pycbc_options, init_logging from pycbc.inject import InjectionSet from pycbc.io.hdf import HFile from pycbc.results.pygrb_postprocessing_utils import template_hash_to_id @@ -155,6 +155,14 @@ parser = argparse.ArgumentParser( add_common_pycbc_options(parser) +parser.add_argument( + "-V", + "--version", + action="version", + version=__version__, + help="show version number and exit", +) + # input/output parser.add_argument( "-f", diff --git a/bin/pygrb/pycbc_grb_trig_cluster b/bin/pygrb/pycbc_grb_trig_cluster index 5f0f1007256..92b1cba65f4 100644 --- a/bin/pygrb/pycbc_grb_trig_cluster +++ b/bin/pygrb/pycbc_grb_trig_cluster @@ -34,6 +34,7 @@ import h5py from gwdatafind.utils import filename_metadata +from pycbc import __version__ from pycbc import init_logging, add_common_pycbc_options from pycbc.io.hdf import HFile @@ -120,6 +121,13 @@ parser = argparse.ArgumentParser( ) add_common_pycbc_options(parser) +parser.add_argument( + "-V", + "--version", + action="version", + version=__version__, + help="show version number and exit", +) # clustering parser.add_argument( @@ -168,145 +176,105 @@ outfile = os.path.join( ), ) -# this list contains the indexing of clusters from all slides -all_clusters = [] +# -- generate clustering bins ------------------- + +nbins = int((end - start) // win + 1) +bins = [[] for i in range(nbins)] +loudsnr = numpy.zeros(nbins) +loudtime = numpy.zeros(nbins) +clusters = [] -# load necessary information from all triggers +# -- cluster ------------------------------------ with HFile(args.trig_file, "r") as h5f: - all_times = h5f["network/end_time_gc"][()] - all_snrs = h5f[f"network/{args.rank_column}"][()] - slide_ids = h5f["network/slide_id"][()] + time = h5f["network"]["end_time_gc"][()] + snr = h5f["network"][args.rank_column][()] # empty file (no triggers), so just 
copy the file -if not all_times.size: +if not time.size: shutil.copyfile(args.trig_file, outfile) msg = "trigger file is empty\n" msg += "copied input file to {}".format(outfile) logging.info(msg) sys.exit(0) -# -- cluster ------------------------------------ - -unique_slide_ids = numpy.unique(slide_ids) -max_slide_id = max(unique_slide_ids) -msg = 'Clustering '+str(len(slide_ids))+' triggers from ' -msg += str(len(unique_slide_ids))+' slides' -logging.info(msg) - -for slide_id in unique_slide_ids: - # indices to slice current slide - slide_id_pos = numpy.where(slide_ids == slide_id)[0] - # all time and snr values for the current slide - time = all_times[slide_id_pos] - snr = all_snrs[slide_id_pos] - - # generate clustering bins - nbins = int((end - start) // win + 1) - bins = [[] for i in range(nbins)] - loudsnr = numpy.zeros(nbins) - loudtime = numpy.zeros(nbins) - # list to index clusters for current slide - clusters = [] - - # find loudest trigger in each bin, for the current slide - for i in tqdm.tqdm(range(time.size), - desc="Initialising bins", - disable=not args.verbose, - total=time.size, - unit='triggers', - **TQDM_KW): - t, s = time[i], snr[i] - idx = int(float(t - start) // win) - bins[idx].append(i) - if s > loudsnr[idx]: - loudsnr[idx] = s - loudtime[idx] = t - - prev = -1 - nxt_ = 1 - first = True - last = False - add_cluster = clusters.append - nclusters = 0 - - # cluster - bar = tqdm.tqdm(bins, - desc="Clustering bins", - disable=not args.verbose, - total=nbins, - unit='bins', - postfix=dict(nclusters=0), - **TQDM_KW) - for i, bin_ in enumerate(bar): - if not bin_: # empty +# find loudest trigger in each bin +for i in tqdm.tqdm(range(time.size), desc="Initialising bins", + disable=not args.verbose, total=time.size, unit='triggers', + **TQDM_KW): + t, s = time[i], snr[i] + idx = int(float(t - start) // win) + bins[idx].append(i) + if s > loudsnr[idx]: + loudsnr[idx] = s + loudtime[idx] = t + +prev = -1 +nxt_ = 1 +first = True +last = False +add_cluster = clusters.append +nclusters = 0 + +# cluster +bar = tqdm.tqdm(bins, desc="Clustering bins", + disable=not args.verbose, total=nbins, unit='bins', + postfix=dict(nclusters=0), **TQDM_KW) +for i, bin_ in enumerate(bar): + if not bin_: # empty + continue + + for idx in bin_: + t, s = time[idx], snr[idx] + + if s < loudsnr[i]: # not loudest in own bin continue - for idx in bin_: - t, s = time[idx], snr[idx] + # check loudest event in previous bin + if not first: + prevt = loudtime[prev] + if prevt and abs(prevt - t) < win and s < loudsnr[prev]: + continue - if s < loudsnr[i]: # not loudest in own bin + # check loudest event in next bin + if not last: + nextt = loudtime[nxt_] + if nextt and abs(nextt - t) < win and s < loudsnr[nxt_]: continue - # check loudest event in previous bin - if not first: - prevt = loudtime[prev] - if prevt and abs(prevt - t) < win and s < loudsnr[prev]: - continue - - # check loudest event in next bin - if not last: - nextt = loudtime[nxt_] - if nextt and abs(nextt - t) < win and s < loudsnr[nxt_]: - continue - - loudest = True - - # check all events in previous bin - if not first and prevt and abs(prevt - t) < win: - for id2 in bins[prev]: - if abs(time[id2] - t) < win and s < snr[id2]: - loudest = False - break - - # check all events in next bin - if loudest and not last and nextt and abs(nextt - t) < win: - for id2 in bins[nxt_]: - if abs(time[id2] - t) < win and s < snr[id2]: - loudest = False - break - - # this is loudest in its vicinity, keep it - if loudest: - add_cluster(idx) - nclusters += 1 - 
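    # (Toy sketch of the clustering rule used on both sides of this hunk:
    # a trigger survives only if nothing louder lies within +/- win
    # seconds of it. Ties are kept here; the real code breaks them bin
    # by bin. Illustrative code, not the PyGRB implementation.)
    import numpy

    def cluster_loudest(times, snrs, win):
        order = numpy.argsort(times)
        t_sorted, s_sorted = times[order], snrs[order]
        keep = []
        for i, (t, s) in enumerate(zip(t_sorted, s_sorted)):
            lo = numpy.searchsorted(t_sorted, t - win)
            hi = numpy.searchsorted(t_sorted, t + win, side='right')
            if s >= s_sorted[lo:hi].max():  # loudest in its neighbourhood
                keep.append(order[i])      # index into the original arrays
        return numpy.asarray(keep, dtype=int)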
bar.set_postfix(nclusters=nclusters) - - # update things for next time - first = False - last = i == nbins - 1 - prev += 1 - nxt_ += 1 - - bar.update() - - # clusters is the indexing array for a specific slide_id - # all_clusters is the (absolute) indexing of all clustered triggers - # so look up the indices [clusters] within the absolute indexing array - # slide_id_pos which is built at each slide_id - all_clusters += list(slide_id_pos[clusters]) - msg = 'Slide '+str(slide_id)+'/'+str(max_slide_id) - msg += ' has '+str(len(slide_id_pos)) - msg += ' trigers that were clustered to '+str(len(clusters)) - logging.info(msg) + loudest = True + + # check all events in previous bin + if not first and prevt and abs(prevt - t) < win: + for id2 in bins[prev]: + if abs(time[id2] - t) < win and s < snr[id2]: + loudest = False + break + + # check all events in next bin + if loudest and not last and nextt and abs(nextt - t) < win: + for id2 in bins[nxt_]: + if abs(time[id2] - t) < win and s < snr[id2]: + loudest = False + break + + # this is loudest in its vicinity, keep it + if loudest: + add_cluster(idx) + nclusters += 1 + bar.set_postfix(nclusters=nclusters) -logging.info('Total clustered triggers: '+str(len(all_clusters))) + # update things for next time + first = False + last = i == nbins - 1 + prev += 1 + nxt_ += 1 -# -- write output -------------------------------- + bar.update() slice_hdf5( args.trig_file, outfile, - numpy.asarray(all_clusters), + numpy.asarray(clusters), verbose=args.verbose, ) diff --git a/bin/pygrb/pycbc_grb_trig_combiner b/bin/pygrb/pycbc_grb_trig_combiner index af2e5620cb9..65ee3b08bc5 100644 --- a/bin/pygrb/pycbc_grb_trig_combiner +++ b/bin/pygrb/pycbc_grb_trig_combiner @@ -35,7 +35,7 @@ from gwdatafind.utils import (file_segment, filename_metadata) from ligo import segments from ligo.segments.utils import fromsegwizard -from pycbc import add_common_pycbc_options, init_logging +from pycbc import __version__, add_common_pycbc_options, init_logging from pycbc.results.pygrb_postprocessing_utils import template_hash_to_id from pycbc.io.hdf import HFile @@ -342,6 +342,13 @@ parser = argparse.ArgumentParser( ) add_common_pycbc_options(parser) +parser.add_argument( + "-V", + "--version", + action="version", + version=__version__, + help="show version number and exit", +) # tags parser.add_argument( diff --git a/bin/pygrb/pycbc_make_offline_grb_workflow b/bin/pygrb/pycbc_make_offline_grb_workflow index 88d52304b94..e8cb3db28d3 100644 --- a/bin/pygrb/pycbc_make_offline_grb_workflow +++ b/bin/pygrb/pycbc_make_offline_grb_workflow @@ -20,6 +20,13 @@ Make workflow for the archival, targeted, coherent inspiral pipeline. 
""" +import pycbc.version + +__author__ = "Andrew Williamson " +__version__ = pycbc.version.git_verbose_msg +__date__ = pycbc.version.date +__program__ = "pycbc_make_offline_grb_workflow" + import sys import os import argparse @@ -39,6 +46,7 @@ workflow_name = "pygrb_offline" # Parse command line options and instantiate pycbc workflow object parser = argparse.ArgumentParser() add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) _workflow.add_workflow_command_line_group(parser) _workflow.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/pygrb/pycbc_pygrb_efficiency b/bin/pygrb/pycbc_pygrb_efficiency index ebb40b6eda8..abba4bc8790 100644 --- a/bin/pygrb/pycbc_pygrb_efficiency +++ b/bin/pygrb/pycbc_pygrb_efficiency @@ -92,7 +92,8 @@ def efficiency_with_errs(found_bestnr, num_injections, num_mc_injs=0): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-F", "--trig-file", action="store", required=True, help="Location of off-source trigger file.") parser.add_argument("--onsource-file", action="store", @@ -551,7 +552,7 @@ ax.plot(dist_plot_vals, (fraction_no_mc), 'g-', ax.errorbar(dist_plot_vals, (fraction_no_mc), yerr=[yerr_low_no_mc, yerr_high_no_mc], c='green') marg_eff = fraction_mc -if np.nansum(marg_eff) > 0: +if not np.isnan(marg_eff.sum()): ax.plot(dist_plot_vals, marg_eff, 'r-', label='Marginalised') ax.errorbar(dist_plot_vals, marg_eff, yerr=[yerr_low_mc, yerr_high_mc], c='red') @@ -624,10 +625,10 @@ ax.plot(dist_plot_vals, (fraction_no_mc), 'g-', ax.errorbar(dist_plot_vals, (fraction_no_mc), yerr=[yerr_low_no_mc, yerr_high_no_mc], c='green') marg_eff = fraction_mc -if not np.nansum(marg_eff) > 0: +if not np.isnan(marg_eff.sum()): ax.plot(dist_plot_vals, marg_eff, 'r-', label='Marginalised') ax.errorbar(dist_plot_vals, marg_eff, yerr=[yerr_low, yerr_high], c='red') -if not np.nansum(red_efficiency) > 0: +if not np.isnan(red_efficiency.sum()): ax.plot(dist_plot_vals, red_efficiency, 'm-', label='Inc. 
counting errors') ax.set_ylim([0, 1]) diff --git a/bin/pygrb/pycbc_pygrb_exclusion_dist_table b/bin/pygrb/pycbc_pygrb_exclusion_dist_table index 873a44c9943..c170aae419a 100644 --- a/bin/pygrb/pycbc_pygrb_exclusion_dist_table +++ b/bin/pygrb/pycbc_pygrb_exclusion_dist_table @@ -32,7 +32,7 @@ __program__ = "pycbc_pygrb_exclusion_dist_table" parser = argparse.ArgumentParser(description=__doc__, formatter_class= argparse.ArgumentDefaultsHelpFormatter) -pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) parser.add_argument("--input-files", nargs="+", required=True, help="List of JSON input files" + " output by pycbc_pygrb_efficiency" + @@ -42,8 +42,6 @@ parser.add_argument("--output-file", required=True, " of exclusion distances.") opts = parser.parse_args() -pycbc.init_logging(opts.verbose) - # Load JSON files as a list of dictionaries file_contents = [] for file_name in opts.input_files: diff --git a/bin/pygrb/pycbc_pygrb_grb_info_table b/bin/pygrb/pycbc_pygrb_grb_info_table index 4189a5a44bb..9903886e8b7 100644 --- a/bin/pygrb/pycbc_pygrb_grb_info_table +++ b/bin/pygrb/pycbc_pygrb_grb_info_table @@ -45,6 +45,7 @@ __program__ = "pycbc_pygrb_grb_info_table" parser = argparse.ArgumentParser(description=__doc__, formatter_class= argparse.ArgumentDefaultsHelpFormatter) add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) parser.add_argument("--trigger-time", type=int, required=True, help="GPS time of the GRB.") diff --git a/bin/pygrb/pycbc_pygrb_minifollowups b/bin/pygrb/pycbc_pygrb_minifollowups index 1f13c549199..2b89d66e594 100644 --- a/bin/pygrb/pycbc_pygrb_minifollowups +++ b/bin/pygrb/pycbc_pygrb_minifollowups @@ -105,6 +105,8 @@ def make_timeseries_plot(workflow, trig_file, snr_type, central_time, # Main script starts here # ============================================================================= parser = argparse.ArgumentParser(description=__doc__[1:]) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) pycbc.add_common_pycbc_options(parser) parser.add_argument('--trig-file', help="HDF file with the triggers found by PyGRB") diff --git a/bin/pygrb/pycbc_pygrb_page_tables b/bin/pygrb/pycbc_pygrb_page_tables index 02d6d6c5411..5f37f2cc1a9 100755 --- a/bin/pygrb/pycbc_pygrb_page_tables +++ b/bin/pygrb/pycbc_pygrb_page_tables @@ -180,7 +180,8 @@ def load_missed_found_injections(hdf_file, ifos, snr_threshold, bank_file, # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-F", "--offsource-file", action="store", required=True, help="Location of off-source trigger file") parser.add_argument("--onsource-file", action="store", diff --git a/bin/pygrb/pycbc_pygrb_plot_chisq_veto b/bin/pygrb/pycbc_pygrb_plot_chisq_veto index 966798fc0f2..815a9792224 100644 --- a/bin/pygrb/pycbc_pygrb_plot_chisq_veto +++ b/bin/pygrb/pycbc_pygrb_plot_chisq_veto @@ -49,7 +49,7 @@ __program__ = "pycbc_pygrb_plot_chisq_veto" # Functions # ============================================================================= # Function to load trigger data: includes applying cut in reweighted SNR -def load_data(input_file, ifos, vetoes, opts, injections=False, 
slide_id=None): +def load_data(input_file, ifos, vetoes, opts, injections=False): """Load data from a trigger/injection file""" snr_type = opts.snr_type @@ -71,14 +71,12 @@ def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): # This will eventually become load_injections trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold, - slide_id=slide_id) + rw_snr_threshold=rw_snr_threshold) else: logging.info("Loading triggers...") trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold, - slide_id=slide_id) + rw_snr_threshold=rw_snr_threshold) # Count surviving points num_trigs_or_injs = len(trigs_or_injs['network/reweighted_snr']) @@ -173,7 +171,8 @@ def calculate_contours(trig_data, opts, new_snrs=None): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") @@ -189,9 +188,7 @@ parser.add_argument("--snr-type", default='coherent', 'single'], help="SNR value to plot on x-axis.") ppu.pygrb_add_bestnr_cut_opt(parser) ppu.pygrb_add_bestnr_opts(parser) -ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() -ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -253,12 +250,10 @@ if ifo and ifo not in ifos: raise RuntimeError(err_msg) # Extract trigger data -trig_data = load_data(trig_file, ifos, vetoes, opts, - slide_id=opts.slide_id) +trig_data = load_data(trig_file, ifos, vetoes, opts) # Extract (or initialize) injection data -inj_data = load_data(found_missed_file, ifos, vetoes, opts, - injections=True, slide_id=0) +inj_data = load_data(found_missed_file, ifos, vetoes, opts, injections=True) # Sanity checks if trig_data[snr_type] is None and inj_data[snr_type] is None: diff --git a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr index 5a2b88321e2..d378d1aea96 100644 --- a/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr +++ b/bin/pygrb/pycbc_pygrb_plot_coh_ifosnr @@ -54,7 +54,7 @@ __program__ = "pycbc_pygrb_plot_coh_ifosnr" # Functions # ============================================================================= # Function to load trigger data -def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): +def load_data(input_file, ifos, vetoes, opts, injections=False): """Load data from a trigger/injection file""" # Initialize the dictionary @@ -75,7 +75,6 @@ def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): ifos, vetoes, rw_snr_threshold=opts.newsnr_threshold, - slide_id=slide_id ) else: logging.info("Loading triggers...") @@ -84,7 +83,6 @@ def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): ifos, vetoes, rw_snr_threshold=opts.newsnr_threshold, - slide_id=slide_id ) # Load SNR data @@ -166,7 +164,9 @@ def plot_deviation(percentile, snr_grid, y, ax, style): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = 
ppu.pygrb_initialize_plot_parser( + description=__doc__, version=__version__ +) parser.add_argument( "-t", "--trig-file", @@ -188,9 +188,7 @@ parser.add_argument( help="Output file a zoomed in version of the plot.", ) ppu.pygrb_add_bestnr_cut_opt(parser) -ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() -ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -236,10 +234,10 @@ ifos, vetoes = ppu.extract_ifos_and_vetoes( ) # Extract trigger data -trig_data = load_data(trig_file, ifos, vetoes, opts, slide_id=opts.slide_id) +trig_data = load_data(trig_file, ifos, vetoes, opts) # Extract (or initialize) injection data -inj_data = load_data(found_file, ifos, vetoes, opts, injections=True, slide_id=0) +inj_data = load_data(found_file, ifos, vetoes, opts, injections=True) # Generate plots logging.info("Plotting...") diff --git a/bin/pygrb/pycbc_pygrb_plot_injs_results b/bin/pygrb/pycbc_pygrb_plot_injs_results index 7a67d1dfab3..1802ada4966 100644 --- a/bin/pygrb/pycbc_pygrb_plot_injs_results +++ b/bin/pygrb/pycbc_pygrb_plot_injs_results @@ -170,7 +170,8 @@ def load_data(input_file_handle, keys, tag): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("--found-missed-file", help="The hdf injection results file", required=True) parser.add_argument("--trig-file", diff --git a/bin/pygrb/pycbc_pygrb_plot_null_stats b/bin/pygrb/pycbc_pygrb_plot_null_stats index 2a3f1b95029..5652ce8dab5 100644 --- a/bin/pygrb/pycbc_pygrb_plot_null_stats +++ b/bin/pygrb/pycbc_pygrb_plot_null_stats @@ -47,7 +47,7 @@ __program__ = "pycbc_pygrb_plot_null_stats" # Functions # ============================================================================= # Function to load trigger data -def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): +def load_data(input_file, ifos, vetoes, opts, injections=False): """Load data from a trigger/injection file""" null_stat_type = opts.y_variable @@ -63,14 +63,12 @@ def load_data(input_file, ifos, vetoes, opts, injections=False, slide_id=None): # This will eventually become ppu.load_injections() trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=opts.newsnr_threshold, - slide_id=slide_id) + rw_snr_threshold=opts.newsnr_threshold) else: logging.info("Loading triggers...") trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=opts.newsnr_threshold, - slide_id=slide_id) + rw_snr_threshold=opts.newsnr_threshold) # Coherent SNR is always used data['coherent'] = trigs_or_injs['network/coherent_snr'] @@ -129,7 +127,8 @@ def calculate_contours(opts, new_snrs=None): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") @@ -142,9 +141,7 @@ parser.add_argument("-y", "--y-variable", default=None, help="Quantity to plot on the vertical axis.") 
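The pygrb plotting hunks in this patch drop the slide_id keyword that let load_data() restrict triggers to a single time slide. Underneath, that restriction is a boolean mask over the slide_id dataset; a toy sketch with made-up arrays:

    import numpy

    slide_ids = numpy.array([0, 0, 1, 2, 1, 0])     # hypothetical dataset
    snrs = numpy.array([5.0, 7.1, 6.3, 4.2, 8.0, 5.5])

    requested = 1                                   # e.g. --slide-id 1
    pos = numpy.where(slide_ids == requested)[0]    # indices into full arrays
    print(pos, snrs[pos])                           # -> [2 4] [6.3 8. ]

In the code being removed, injections were always loaded with slide_id=0, i.e. the zero-lag slide, which is why the injection calls lose the keyword too.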
ppu.pygrb_add_null_snr_opts(parser) ppu.pygrb_add_bestnr_cut_opt(parser) -ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() -ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -186,12 +183,10 @@ ifos, vetoes = ppu.extract_ifos_and_vetoes(trig_file, opts.veto_files, opts.veto_category) # Extract trigger data -trig_data = load_data(trig_file, ifos, vetoes, opts, - slide_id=opts.slide_id) +trig_data = load_data(trig_file, ifos, vetoes, opts) # Extract (or initialize) injection data -inj_data = load_data(found_missed_file, ifos, vetoes, opts, - injections=True, slide_id=0) +inj_data = load_data(found_missed_file, ifos, vetoes, opts, injections=True) # Generate plots logging.info("Plotting...") diff --git a/bin/pygrb/pycbc_pygrb_plot_skygrid b/bin/pygrb/pycbc_pygrb_plot_skygrid index 435f37f1d20..4cc37b76832 100644 --- a/bin/pygrb/pycbc_pygrb_plot_skygrid +++ b/bin/pygrb/pycbc_pygrb_plot_skygrid @@ -58,7 +58,8 @@ def load_data(input_file, ifos, vetoes, injections=False): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") diff --git a/bin/pygrb/pycbc_pygrb_plot_snr_timeseries b/bin/pygrb/pycbc_pygrb_plot_snr_timeseries index 2f8266cca32..75b567d6483 100644 --- a/bin/pygrb/pycbc_pygrb_plot_snr_timeseries +++ b/bin/pygrb/pycbc_pygrb_plot_snr_timeseries @@ -50,7 +50,7 @@ __program__ = "pycbc_pygrb_plot_snr_timeseries" # ============================================================================= # Load trigger data def load_data(input_file, ifos, vetoes, rw_snr_threshold=None, - injections=False, slide_id=None): + injections=False): """Load data from a trigger/injection file""" trigs_or_injs = None @@ -60,14 +60,12 @@ def load_data(input_file, ifos, vetoes, rw_snr_threshold=None, # This will eventually become load_injections trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold, - slide_id=slide_id) + rw_snr_threshold=rw_snr_threshold) else: logging.info("Loading triggers...") trigs_or_injs = \ ppu.load_triggers(input_file, ifos, vetoes, - rw_snr_threshold=rw_snr_threshold, - slide_id=slide_id) + rw_snr_threshold=rw_snr_threshold) return trigs_or_injs @@ -97,7 +95,8 @@ def reset_times(data_time, trig_time): # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-t", "--trig-file", action="store", default=None, required=True, help="The location of the trigger file") @@ -109,9 +108,7 @@ parser.add_argument("-y", "--y-variable", default=None, choices=['coherent', 'single', 'reweighted', 'null'], help="Quantity to plot on the vertical axis.") ppu.pygrb_add_bestnr_cut_opt(parser) -ppu.pygrb_add_slide_opts(parser) opts = parser.parse_args() -ppu.slide_opts_helper(opts) init_logging(opts.verbose, format="%(asctime)s: %(levelname)s: %(message)s") @@ -140,24 +137,21 @@ ifos, vetoes = 
ppu.extract_ifos_and_vetoes(trig_file, opts.veto_files, # points to show the impact of the cut, otherwise remove points with # reweighted SNR below threshold if snr_type == 'reweighted': - trig_data = load_data(trig_file, ifos, vetoes, - slide_id=opts.slide_id) + trig_data = load_data(trig_file, ifos, vetoes) trig_data['network/reweighted_snr'] = \ reweightedsnr_cut(trig_data['network/reweighted_snr'], opts.newsnr_threshold) - inj_data = load_data(inj_file, ifos, vetoes, injections=True, - slide_id=0) + inj_data = load_data(inj_file, ifos, vetoes, injections=True) if inj_data is not None: inj_data['network/reweighted_snr'] = \ reweightedsnr_cut(inj_data['network/reweighted_snr'], opts.newsnr_threshold) else: trig_data = load_data(trig_file, ifos, vetoes, - rw_snr_threshold=opts.newsnr_threshold, - slide_id=opts.slide_id) + rw_snr_threshold=opts.newsnr_threshold) inj_data = load_data(inj_file, ifos, vetoes, rw_snr_threshold=opts.newsnr_threshold, - injections=True, slide_id=0) + injections=True) # Specify HDF file keys for x quantity (time) and y quantity (SNR) if snr_type == 'single': diff --git a/bin/pygrb/pycbc_pygrb_plot_stats_distribution b/bin/pygrb/pycbc_pygrb_plot_stats_distribution index 827b8467eaa..bca30d940ca 100644 --- a/bin/pygrb/pycbc_pygrb_plot_stats_distribution +++ b/bin/pygrb/pycbc_pygrb_plot_stats_distribution @@ -47,7 +47,8 @@ __program__ = "pycbc_pygrb_plot_stats_distribution" # ============================================================================= # Main script starts here # ============================================================================= -parser = ppu.pygrb_initialize_plot_parser(description=__doc__) +parser = ppu.pygrb_initialize_plot_parser(description=__doc__, + version=__version__) parser.add_argument("-F", "--trig-file", action="store", required=True, help="Location of off-source trigger file") parser.add_argument("-x", "--x-variable", required=True, diff --git a/bin/pygrb/pycbc_pygrb_pp_workflow b/bin/pygrb/pycbc_pygrb_pp_workflow index 73620850291..fc14632d79d 100644 --- a/bin/pygrb/pycbc_pygrb_pp_workflow +++ b/bin/pygrb/pycbc_pygrb_pp_workflow @@ -49,6 +49,7 @@ __program__ = "pycbc_pygrb_pp_workflow" # Use the standard workflow command-line parsing routines. 
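Across these plotting executables the version string is now threaded through ppu.pygrb_initialize_plot_parser(description=..., version=...) rather than attached afterwards. A minimal sketch of such a factory, matching the call signature above but with an illustrative body:

    import argparse

    def pygrb_initialize_plot_parser(description=None, version=None):
        parser = argparse.ArgumentParser(description=description)
        if version is not None:
            parser.add_argument('--version', action='version',
                                version=version)
        return parser

    parser = pygrb_initialize_plot_parser(description='demo',
                                          version='0.0.dev0')

Whether the real helper uses the built-in action='version' or the custom Version action is not visible in this patch; the sketch assumes the former.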
parser = argparse.ArgumentParser(description=__doc__[1:]) pycbc.add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) parser.add_argument("-t", "--trig-files", action="store", required=True, nargs="+", help="The locations of the trigger files " diff --git a/bin/workflows/pycbc_make_bank_verifier_workflow b/bin/workflows/pycbc_make_bank_verifier_workflow index 8f5b6808477..27349611f70 100644 --- a/bin/workflows/pycbc_make_bank_verifier_workflow +++ b/bin/workflows/pycbc_make_bank_verifier_workflow @@ -156,6 +156,7 @@ class BanksimTablePointInjsExecutable(wf.Executable): _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_faithsim_workflow b/bin/workflows/pycbc_make_faithsim_workflow index 9bcb6a13ed0..37aae89a3a5 100755 --- a/bin/workflows/pycbc_make_faithsim_workflow +++ b/bin/workflows/pycbc_make_faithsim_workflow @@ -18,6 +18,9 @@ from pycbc import add_common_pycbc_options, init_logging from pycbc.workflow.plotting import PlotExecutable from pycbc.workflow import setup_splittable_dax_generated +__version__ = pycbc.version.git_verbose_msg + + def make_faithsim_plot(workflow, analysis_time, input_file, out_dir, tags=None): tags = [] if tags is None else tags secs = workflow.cp.get_subsections("pycbc_faithsim_plots") @@ -97,6 +100,7 @@ class CollectResultsExecutable(wf.Executable): parser = argparse.ArgumentParser(description=__doc__) add_common_pycbc_options(parser) +parser.add_argument("--version", action="version", version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_inference_inj_workflow b/bin/workflows/pycbc_make_inference_inj_workflow index ffb6a47167a..a0cee1d1a94 100644 --- a/bin/workflows/pycbc_make_inference_inj_workflow +++ b/bin/workflows/pycbc_make_inference_inj_workflow @@ -23,9 +23,11 @@ import logging import os import shlex import numpy +import pycbc.version import socket import sys +from pycbc import __version__ from pycbc import results, init_logging, add_common_pycbc_options from pycbc.results import layout from pycbc.results import metadata @@ -89,6 +91,9 @@ core.add_workflow_settings_cli(parser, include_subdax_opts=True) parser.add_argument("--seed", type=int, default=0, help="Starting to seed to use. This will be incremented " "one for each injection analyzed. 
Default is 0.") +# version option +parser.add_argument("--version", action="version", version=__version__, + help="Prints version information.") # parser command line opts = parser.parse_args() diff --git a/bin/workflows/pycbc_make_inference_plots_workflow b/bin/workflows/pycbc_make_inference_plots_workflow index 5ce1e74dccf..a74b6afe0b8 100644 --- a/bin/workflows/pycbc_make_inference_plots_workflow +++ b/bin/workflows/pycbc_make_inference_plots_workflow @@ -41,6 +41,7 @@ from pycbc.workflow import core from pycbc.workflow import datafind from pycbc.workflow import plotting from pycbc.workflow import versioning +from pycbc import __version__ import pycbc.workflow.inference_followups as inffu @@ -133,6 +134,8 @@ add_common_pycbc_options(parser) configuration.add_workflow_command_line_group(parser) # workflow options core.add_workflow_settings_cli(parser, include_subdax_opts=True) +parser.add_argument("--version", action="version", version=__version__, + help="Prints version information.") opts = parser.parse_args() posterior_file_dir = 'posterior_files' diff --git a/bin/workflows/pycbc_make_inference_workflow b/bin/workflows/pycbc_make_inference_workflow index 4806a6365e3..9139c3cc0f3 100644 --- a/bin/workflows/pycbc_make_inference_workflow +++ b/bin/workflows/pycbc_make_inference_workflow @@ -40,6 +40,7 @@ from pycbc.workflow import core from pycbc.workflow import datafind from pycbc.workflow import plotting from pycbc.workflow import versioning +from pycbc import __version__ import pycbc.workflow.inference_followups as inffu from pycbc.workflow.jobsetup import PycbcInferenceExecutable @@ -161,6 +162,9 @@ parser.add_argument("--seed", type=int, default=0, help="Seed to use for inference job(s). If multiple " "events are analyzed, the seed will be incremented " "by one for each event.") +# version option +parser.add_argument("--version", action="version", version=__version__, + help="Prints version information.") # parser command line diff --git a/bin/workflows/pycbc_make_offline_search_workflow b/bin/workflows/pycbc_make_offline_search_workflow index 2d6a2aef729..50cc7dd8c01 100755 --- a/bin/workflows/pycbc_make_offline_search_workflow +++ b/bin/workflows/pycbc_make_offline_search_workflow @@ -22,6 +22,10 @@ finding and ranking then generate post-processing and plots. 
""" import pycbc +import pycbc.version +__version__ = pycbc.version.git_verbose_msg +__date__ = pycbc.version.date +__program__ = "pycbc_offline" import sys import socket @@ -149,6 +153,7 @@ def check_stop(job_name, container, workflow, finalize_workflow): parser = argparse.ArgumentParser(description=__doc__[1:]) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_psd_estimation_workflow b/bin/workflows/pycbc_make_psd_estimation_workflow index 7a5dc74861a..78ad36c24d6 100644 --- a/bin/workflows/pycbc_make_psd_estimation_workflow +++ b/bin/workflows/pycbc_make_psd_estimation_workflow @@ -29,12 +29,15 @@ from ligo import segments as _segments import lal import pycbc +import pycbc.version import pycbc.workflow from pycbc.results import save_fig_with_metadata, two_column_layout import pycbc.workflow parser = argparse.ArgumentParser(description=__doc__) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', + version=pycbc.version.git_verbose_msg) pycbc.workflow.add_workflow_command_line_group(parser) pycbc.workflow.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/bin/workflows/pycbc_make_sbank_workflow b/bin/workflows/pycbc_make_sbank_workflow index 70133647625..d413b5cb86f 100644 --- a/bin/workflows/pycbc_make_sbank_workflow +++ b/bin/workflows/pycbc_make_sbank_workflow @@ -28,9 +28,16 @@ import os import argparse import pycbc +import pycbc.version import pycbc.workflow as wf import pycbc.workflow.pegasus_workflow as pwf +# Boiler-plate stuff +__author__ = "Ian Harry " +__version__ = pycbc.version.git_verbose_msg +__date__ = pycbc.version.date +__program__ = "pycbc_make_sbank_workflow" + # We define classes for all executables used in the workflow class SbankExecutable(wf.Executable): @@ -164,6 +171,7 @@ class CombineHDFBanksExecutable(wf.Executable): _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=__version__) parser.add_argument("--output-file", type=str, default=None, help="Specify the output file name. Either a name can be " "provided or a full path to file. 
Is this is not " diff --git a/bin/workflows/pycbc_make_uberbank_workflow b/bin/workflows/pycbc_make_uberbank_workflow index 8d33187ca0c..c0498971706 100644 --- a/bin/workflows/pycbc_make_uberbank_workflow +++ b/bin/workflows/pycbc_make_uberbank_workflow @@ -142,6 +142,7 @@ class SbankDaxGenerator(wf.Executable): _desc = __doc__[1:] parser = argparse.ArgumentParser(description=_desc) pycbc.add_common_pycbc_options(parser) +parser.add_argument('--version', action='version', version=__version__) wf.add_workflow_command_line_group(parser) wf.add_workflow_settings_cli(parser) args = parser.parse_args() diff --git a/examples/inference/margtime/margtime.ini b/examples/inference/margtime/margtime.ini index fd17c0e7e5a..ce2bf7bebbd 100644 --- a/examples/inference/margtime/margtime.ini +++ b/examples/inference/margtime/margtime.ini @@ -3,11 +3,8 @@ name = marginalized_time low-frequency-cutoff = 30.0 -# This is the sample rate used for the model and marginalization -sample_rate = 4096 - marginalize_vector_params = tc, ra, dec, polarization -marginalize_vector_samples = 2000 +marginalize_vector_samples = 500 ; You shouldn't use phase marginalization if the approximant has ; higher-order modes diff --git a/examples/inference/margtime/run.sh b/examples/inference/margtime/run.sh index 3d9a7f32a73..383d348ef29 100644 --- a/examples/inference/margtime/run.sh +++ b/examples/inference/margtime/run.sh @@ -1,6 +1,6 @@ OMP_NUM_THREADS=1 pycbc_inference \ --config-file `dirname "$0"`/margtime.ini \ ---nprocesses 1 \ +--nprocesses 2 \ --processing-scheme mkl \ --output-file marg_150914.hdf \ --seed 0 \ @@ -23,5 +23,4 @@ pycbc_inference_plot_posterior \ "primary_mass(mass1, mass2) / (1 + redshift(distance)):srcmass1" \ "secondary_mass(mass1, mass2) / (1 + redshift(distance)):srcmass2" \ ra dec tc inclination coa_phase polarization distance \ ---vmin 23.2 \ --z-arg snr diff --git a/examples/search/analysis.ini b/examples/search/analysis.ini index 4361ef1eb2c..2bed5df7b2c 100644 --- a/examples/search/analysis.ini +++ b/examples/search/analysis.ini @@ -217,9 +217,6 @@ fit-threshold = ${sngls_statmap|fit-threshold} [combine_statmap] cluster-window = ${statmap|cluster-window} -far-calculation-method = ${sngls_statmap|far-calculation-method} -fit-function = ${sngls_statmap|fit-function} -fit-threshold = ${sngls_statmap|fit-threshold} [combine_statmap-full_data] max-hierarchical-removal = ${workflow-results|max-hierarchical-removal} diff --git a/examples/search/plotting.ini b/examples/search/plotting.ini index 5a7d4f55837..0b1ab2a2cbe 100644 --- a/examples/search/plotting.ini +++ b/examples/search/plotting.ini @@ -61,8 +61,6 @@ window = 0.1 [html_snippet] [page_coincinfo] -sngl-ranking = newsnr_sgveto_psdvar - [page_coincinfo-background] statmap-file-subspace-name=background_exc diff --git a/pycbc/__init__.py b/pycbc/__init__.py index 99f4cc6ef37..3b6f6f1e8f2 100644 --- a/pycbc/__init__.py +++ b/pycbc/__init__.py @@ -32,8 +32,6 @@ import logging import random import string -import importlib.util -import importlib.machinery from datetime import datetime as dt try: @@ -41,11 +39,9 @@ # before version.py has been generated. 
from .version import git_hash from .version import version as pycbc_version - from .version import PyCBCVersionAction except: git_hash = 'none' pycbc_version = 'none' - PyCBCVersionAction = None __version__ = pycbc_version @@ -83,23 +79,12 @@ def add_common_pycbc_options(parser): title="PyCBC common options", description="Common options for PyCBC executables.", ) - group.add_argument( - '-v', - '--verbose', - action='count', - default=0, - help=( - 'Add verbosity to logging. Adding the option ' - 'multiple times makes logging progressively ' - 'more verbose, e.g. --verbose or -v provides ' - 'logging at the info level, but -vv or ' - '--verbose --verbose provides debug logging.' - ) - ) - group.add_argument( - '--version', - action=PyCBCVersionAction, - ) + group.add_argument('-v', '--verbose', action='count', default=0, + help='Add verbosity to logging. Adding the option ' + 'multiple times makes logging progressively ' + 'more verbose, e.g. --verbose or -v provides ' + 'logging at the info level, but -vv or ' + '--verbose --verbose provides debug logging.') def init_logging(verbose=False, default_level=0, to_file=None, @@ -212,13 +197,8 @@ def makedir(path): # preserve common state information which we have relied on when using # multiprocessing based pools. import multiprocessing - if multiprocessing.get_start_method(allow_none=True) is None: - if hasattr(multiprocessing, 'set_start_method'): - multiprocessing.set_start_method('fork') - elif multiprocessing.get_start_method() != 'fork': - warnings.warn("PyCBC requires the use of the 'fork' start method" - " for multiprocessing, it is currently set to {}" - .format(multiprocessing.get_start_method())) + if hasattr(multiprocessing, 'set_start_method'): + multiprocessing.set_start_method('fork') else: HAVE_OMP = True @@ -234,16 +214,3 @@ def gps_now(): from astropy.time import Time return float(Time.now().gps) - -# This is needed as a backwards compatibility. The function was removed in -# python 3.12. -def load_source(modname, filename): - loader = importlib.machinery.SourceFileLoader(modname, filename) - spec = importlib.util.spec_from_file_location(modname, filename, - loader=loader) - module = importlib.util.module_from_spec(spec) - # The module is always executed and not cached in sys.modules. - # Uncomment the following line to cache the module. - # sys.modules[module.__name__] = module - loader.exec_module(module) - return module diff --git a/pycbc/_version.py b/pycbc/_version.py index a68d1981160..cdb4e915a3d 100644 --- a/pycbc/_version.py +++ b/pycbc/_version.py @@ -75,82 +75,40 @@ def get_lal_info(module, lib_glob): return version_str -class PyCBCVersionAction(argparse._StoreAction): - """Subclass of argparse._StoreAction that prints version information for - PyCBC, and for LAL and LALSimulation depending on an integer variable. - Can be supplied without the option +class Version(argparse.Action): + """Subclass of argparse.Action that prints version information for PyCBC, + LAL and LALSimulation. """ - default_help = ( - 'Display PyCBC version information and exit. ' - 'Can optionally supply a modifier integer to control the ' - 'verbosity of the version information. 
0 and 1 are the ' - 'same as --version; 2 provides more detailed PyCBC library ' - 'information; 3 provides information about PyCBC, ' - 'LAL and LALSimulation packages (if installed)' - ) - - def __init__(self, - option_strings, - dest, - help=default_help, - **kw): - argparse._StoreAction.__init__( - self, - option_strings, - dest=dest, - nargs='?', - help=help, - type=int, - **kw, - ) + def __init__(self, nargs=0, **kw): + super(Version, self).__init__(nargs=nargs, **kw) def __call__(self, parser, namespace, values, option_string=None): - version_no = 0 if values is None else values import pycbc - setattr(namespace, self.dest, version_no) - if version_no <= 1: - # --version called with zero or default - return the - # simple version string - version_str = "PyCBC version: " + pycbc.version.version - if version_no > 1: - # --version with flag above 1 - return the verbose version string - version_str = ( - "--- PyCBC Version --------------------------\n" + - pycbc.version.git_verbose_msg - ) - if version_no > 2: - # --version called more than twice - print all version information - # possible - import __main__ - version_str += ( - "\n\nCurrent Executable: " + __main__.__file__ + - "\nImported from: " + inspect.getfile(pycbc) + - "\n\n--- LAL Version ----------------------------\n" - ) - try: - import lal.git_version - except ImportError: - version_str += "\nLAL not installed in environment\n" - else: - version_str += get_lal_info( - lal, - '_lal*.so' - ) - - version_str += "\n\n--- LALSimulation Version-------------------\n" - try: - import lalsimulation.git_version - except ImportError: - version_str += "\nLALSimulation not installed in environment\n" - else: - version_str += get_lal_info( - lalsimulation, - '_lalsimulation*.so' - ) + version_str = ( + "--- PyCBC Version --------------------------\n" + + pycbc.version.git_verbose_msg + + "\n\nImported from: " + inspect.getfile(pycbc) + ) + + version_str += "\n\n--- LAL Version ----------------------------\n" + try: + import lal.git_version + except ImportError: + version_str += "\nLAL not installed in environment\n" + else: + version_str += get_lal_info(lal, '_lal*.so') + + version_str += "\n\n--- LALSimulation Version-------------------\n" + try: + import lalsimulation.git_version + except ImportError: + version_str += "\nLALSimulation not installed in environment\n" + else: + version_str += get_lal_info(lalsimulation, '_lalsimulation*.so') print(version_str) sys.exit(0) -__all__ = ['PyCBCVersionAction'] +__all__ = ['Version'] diff --git a/pycbc/events/coinc.py b/pycbc/events/coinc.py index ee729a97bcc..7bdf298a23b 100644 --- a/pycbc/events/coinc.py +++ b/pycbc/events/coinc.py @@ -25,17 +25,9 @@ coincident triggers. """ -import numpy -import logging -import copy -import time as timemod -import threading - -import pycbc.pnutils +import numpy, logging, pycbc.pnutils, copy from pycbc.detector import Detector, ppdets from pycbc import conversions as conv - -from . 
import stat as pycbcstat from .eventmgr_cython import coincbuffer_expireelements from .eventmgr_cython import coincbuffer_numgreater from .eventmgr_cython import timecoincidence_constructidxs @@ -316,7 +308,7 @@ def win(ifo1, ifo2): # tested against fixed and pivot are now present for testing with new # dependent ifos for ifo2 in ids: - logger.info('added ifo %s, testing against %s', ifo1, ifo2) + logger.info('added ifo %s, testing against %s' % (ifo1, ifo2)) w = win(ifo1, ifo2) left = time1.searchsorted(ctimes[ifo2] - w) right = time1.searchsorted(ctimes[ifo2] + w) @@ -837,7 +829,6 @@ def __init__(self, num_templates, analysis_block, background_statistic, ifar_limit=100, timeslide_interval=.035, coinc_window_pad=.002, - statistic_refresh_rate=None, return_background=False, **kwargs): """ @@ -865,9 +856,6 @@ def __init__(self, num_templates, analysis_block, background_statistic, coinc_window_pad: float Amount of time allowed to form a coincidence in addition to the time of flight in seconds. - statistic_refresh_rate: float - How regularly to run the update_files method on the statistic - class (in seconds), default not do do this return_background: boolean If true, background triggers will also be included in the file output. @@ -875,10 +863,11 @@ class (in seconds), default not do do this Additional options for the statistic to use. See stat.py for more details on statistic options. """ + from . import stat self.num_templates = num_templates self.analysis_block = analysis_block - stat_class = pycbcstat.get_statistic(background_statistic) + stat_class = stat.get_statistic(background_statistic) self.stat_calculator = stat_class( sngl_ranking, stat_files, @@ -886,10 +875,6 @@ class (in seconds), default not do do this **kwargs ) - self.time_stat_refreshed = timemod.time() - self.stat_calculator_lock = threading.Lock() - self.statistic_refresh_rate = statistic_refresh_rate - self.timeslide_interval = timeslide_interval self.return_background = return_background self.coinc_window_pad = coinc_window_pad @@ -970,6 +955,7 @@ def pick_best_coinc(cls, coinc_results): @classmethod def from_cli(cls, args, num_templates, analysis_chunk, ifos): + from . import stat # Allow None inputs stat_files = args.statistic_files or [] @@ -978,7 +964,7 @@ def from_cli(cls, args, num_templates, analysis_chunk, ifos): # flatten the list of lists of filenames to a single list (may be empty) stat_files = sum(stat_files, []) - kwargs = pycbcstat.parse_statistic_keywords_opt(stat_keywords) + kwargs = stat.parse_statistic_keywords_opt(stat_keywords) return cls(num_templates, analysis_chunk, args.ranking_statistic, @@ -989,13 +975,13 @@ def from_cli(cls, args, num_templates, analysis_chunk, ifos): timeslide_interval=args.timeslide_interval, ifos=ifos, coinc_window_pad=args.coinc_window_pad, - statistic_refresh_rate=args.statistic_refresh_rate, **kwargs) @staticmethod def insert_args(parser): + from . 
import stat - pycbcstat.insert_statistic_option_group(parser) + stat.insert_statistic_option_group(parser) group = parser.add_argument_group('Coincident Background Estimation') group.add_argument('--store-background', action='store_true', @@ -1388,12 +1374,11 @@ def add_singles(self, results): valid_ifos = [k for k in results.keys() if results[k] and k in self.ifos] if len(valid_ifos) == 0: return {} - with self.stat_calculator_lock: - # Add single triggers to the internal buffer - self._add_singles_to_buffer(results, ifos=valid_ifos) + # Add single triggers to the internal buffer + self._add_singles_to_buffer(results, ifos=valid_ifos) - # Calculate zerolag and background coincidences - _, coinc_results = self._find_coincs(results, valid_ifos=valid_ifos) + # Calculate zerolag and background coincidences + _, coinc_results = self._find_coincs(results, valid_ifos=valid_ifos) # record if a coinc is possible in this chunk if len(valid_ifos) == 2: @@ -1401,51 +1386,6 @@ def add_singles(self, results): return coinc_results - def start_refresh_thread(self): - """ - Start a thread managing whether the stat_calculator will be updated - """ - if self.statistic_refresh_rate is None: - logger.info( - "Statistic refresh disabled for %s", ppdets(self.ifos, "-") - ) - return - thread = threading.Thread( - target=self.refresh_statistic, - daemon=True, - name="Stat refresh " + ppdets(self.ifos, "-") - ) - logger.info( - "Starting %s statistic refresh thread", ppdets(self.ifos, "-") - ) - thread.start() - - def refresh_statistic(self): - """ - Function to refresh the stat_calculator at regular intervals - """ - while True: - # How long since the statistic was last updated? - since_stat_refresh = timemod.time() - self.time_stat_refreshed - if since_stat_refresh > self.statistic_refresh_rate: - self.time_stat_refreshed = timemod.time() - logger.info( - "Checking %s statistic for updated files", - ppdets(self.ifos, "-"), - ) - with self.stat_calculator_lock: - self.stat_calculator.check_update_files() - # Sleep one second for safety - timemod.sleep(1) - # Now include the time it took the check / update the statistic - since_stat_refresh = timemod.time() - self.time_stat_refreshed - logger.debug( - "%s statistic: Waiting %.3fs for next refresh", - ppdets(self.ifos, "-"), - self.statistic_refresh_rate - since_stat_refresh, - ) - timemod.sleep(self.statistic_refresh_rate - since_stat_refresh + 1) - __all__ = [ "background_bin_from_string", diff --git a/pycbc/events/single.py b/pycbc/events/single.py index db22fd08712..2df8fb8f4e6 100644 --- a/pycbc/events/single.py +++ b/pycbc/events/single.py @@ -2,14 +2,12 @@ """ import logging import copy -import threading -import time +import h5py import numpy as np from pycbc.events import trigger_fits as fits, stat from pycbc.types import MultiDetOptionAction from pycbc import conversions as conv -from pycbc.io.hdf import HFile from pycbc import bin_utils logger = logging.getLogger('pycbc.events.single') @@ -27,58 +25,13 @@ def __init__(self, ifo, statistic=None, sngl_ranking=None, stat_files=None, - statistic_refresh_rate=None, **kwargs): - """ - Parameters - ---------- - ifo: str - Name of the ifo that is being analyzed - newsnr_threshold: float - Minimum value for the reweighted SNR of the event under - consideration. 
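The refresh machinery deleted in this hunk follows a common pattern: a daemon thread periodically reloads shared state while consumers synchronize on a lock. A generic sketch of the pattern (class and method names here are hypothetical, not PyCBC's):

import threading
import time

class Refresher:
    def __init__(self, refresh_rate):
        self.refresh_rate = refresh_rate
        self.lock = threading.Lock()
        self.last_refresh = time.time()

    def start(self):
        # daemon=True: the thread dies with the main process
        threading.Thread(target=self._loop, daemon=True).start()

    def _loop(self):
        while True:
            if time.time() - self.last_refresh > self.refresh_rate:
                with self.lock:      # consumers hold the same lock
                    self.reload()
                self.last_refresh = time.time()
            time.sleep(1)

    def reload(self):
        pass  # placeholder for re-reading files, etc.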
Which reweighted SNR is defined by sngl_ranking - reduced_chisq_threshold: float - Maximum value for the reduced chisquared of the event under - consideration - duration_threshold: float - Minimum value for the duration of the template which found the - event under consideration - fit_file: str or path - (optional) the file containing information about the - single-detector event significance distribution fits - sngl_ifar_est_dist: str - Which trigger distribution to use when calculating IFAR of - single-detector events - fixed_ifar: float - (optional) give a fixed IFAR value to any event which passes the - threshold criteria - statistic: str - The name of the statistic to rank events. - sngl_ranking: str - The single detector ranking to use with the background statistic - stat_files: list of strs - List of filenames that contain information used to construct - various coincident statistics. - maximum_ifar: float - The largest inverse false alarm rate in years that we would like to - calculate. - statistic_refresh_rate: float - How regularly to run the update_files method on the statistic - class (in seconds), default not do do this - kwargs: dict - Additional options for the statistic to use. See stat.py - for more details on statistic options. - """ self.ifo = ifo self.fit_file = fit_file self.sngl_ifar_est_dist = sngl_ifar_est_dist self.fixed_ifar = fixed_ifar self.maximum_ifar = maximum_ifar - self.time_stat_refreshed = time.time() - self.stat_calculator_lock = threading.Lock() - self.statistic_refresh_rate = statistic_refresh_rate - stat_class = stat.get_statistic(statistic) self.stat_calculator = stat_class( sngl_ranking, @@ -235,7 +188,6 @@ def from_cli(cls, args, ifo): statistic=args.ranking_statistic, sngl_ranking=args.sngl_ranking, stat_files=stat_files, - statistic_refresh_rate=args.statistic_refresh_rate, **kwargs ) @@ -275,9 +227,7 @@ def check(self, trigs, data_reader): trigsc['chisq_dof'] = (cut_trigs['chisq_dof'] + 2) / 2 # Calculate the ranking reweighted SNR for cutting - with self.stat_calculator_lock: - single_rank = self.stat_calculator.get_sngl_ranking(trigsc) - + single_rank = self.stat_calculator.get_sngl_ranking(trigsc) sngl_idx = single_rank > self.thresholds['ranking'] if not np.any(sngl_idx): return None @@ -286,9 +236,8 @@ def check(self, trigs, data_reader): for k in trigs} # Calculate the ranking statistic - with self.stat_calculator_lock: - sngl_stat = self.stat_calculator.single(cutall_trigs) - rank = self.stat_calculator.rank_stat_single((self.ifo, sngl_stat)) + sngl_stat = self.stat_calculator.single(cutall_trigs) + rank = self.stat_calculator.rank_stat_single((self.ifo, sngl_stat)) # 'cluster' by taking the maximal statistic value over the trigger set i = rank.argmax() @@ -316,7 +265,7 @@ def calculate_ifar(self, sngl_ranking, duration): return self.fixed_ifar[self.ifo] try: - with HFile(self.fit_file, 'r') as fit_file: + with h5py.File(self.fit_file, 'r') as fit_file: bin_edges = fit_file['bins_edges'][:] live_time = fit_file[self.ifo].attrs['live_time'] thresh = fit_file.attrs['fit_threshold'] @@ -354,43 +303,3 @@ def calculate_ifar(self, sngl_ranking, duration): rate_louder *= len(rates) return min(conv.sec_to_year(1. 
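The tail of calculate_ifar above converts a Poisson rate of louder-than-candidate noise events into an inverse false-alarm rate in years, capped at maximum_ifar. The conversion step in isolation (a sketch; the rate value and default cap are illustrative):

SECONDS_PER_YEAR = 365.25 * 24 * 3600

def ifar_years(rate_louder_hz, maximum_ifar=1e4):
    # rate_louder_hz: expected rate (Hz) of noise events louder than the
    # candidate; maximum_ifar: cap in years (assumed default)
    return min((1.0 / rate_louder_hz) / SECONDS_PER_YEAR, maximum_ifar)

print(ifar_years(1e-9))  # about 31.7 years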
/ rate_louder), self.maximum_ifar) - - def start_refresh_thread(self): - """ - Start a thread managing whether the stat_calculator will be updated - """ - if self.statistic_refresh_rate is None: - logger.info("Statistic refresh disabled for %s", self.ifo) - return - thread = threading.Thread( - target=self.refresh_statistic, - daemon=True, - name="Stat refresh " + self.ifo - ) - logger.info("Starting %s statistic refresh thread", self.ifo) - thread.start() - - def refresh_statistic(self): - """ - Function to refresh the stat_calculator at regular intervals - """ - while True: - # How long since the statistic was last updated? - since_stat_refresh = time.time() - self.time_stat_refreshed - if since_stat_refresh > self.statistic_refresh_rate: - self.time_stat_refreshed = time.time() - logger.info( - "Checking %s statistic for updated files", self.ifo - ) - with self.stat_calculator_lock: - self.stat_calculator.check_update_files() - # Sleep one second for safety - time.sleep(1) - # Now use the time it took the check / update the statistic - since_stat_refresh = time.time() - self.time_stat_refreshed - logger.debug( - "%s statistic: Waiting %.3fs for next refresh", - self.ifo, - self.statistic_refresh_rate - since_stat_refresh - ) - time.sleep(self.statistic_refresh_rate - since_stat_refresh) diff --git a/pycbc/events/stat.py b/pycbc/events/stat.py index ff8da9a2f19..fb4bc8eee9a 100644 --- a/pycbc/events/stat.py +++ b/pycbc/events/stat.py @@ -26,11 +26,8 @@ values. """ import logging -from hashlib import sha1 -from datetime import datetime as dt import numpy import h5py - from . import ranking from . import coinc_rate from .eventmgr_cython import logsignalrateinternals_computepsignalbins @@ -71,9 +68,6 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): " %s. Can't provide more than one!" 
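The deleted get_file_hashes/files_changed pair is built on the stock hashlib idiom for change detection: digest each file's bytes and compare against a cached digest later. Stand-alone:

from hashlib import sha1

def file_sha1(filename):
    # Hex SHA-1 digest of the file's current contents
    with open(filename, 'rb') as f:
        return sha1(f.read()).hexdigest()

def changed_files(filenames, cached):
    # Names whose digest no longer matches the cached one
    return [f for f in filenames if file_sha1(f) != cached.get(f)]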
% stat) logger.info("Found file %s for stat %s", filename, stat) self.files[stat] = filename - # Keep track of when stat files hashes so it can be - # reloaded if it has changed - self.file_hashes = self.get_file_hashes() # Provide the dtype of the single detector method's output # This is used by background estimation codes that need to maintain @@ -91,67 +85,6 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): if key.startswith('sngl_ranking_'): self.sngl_ranking_kwargs[key[13:]] = value - def get_file_hashes(self): - """ - Get sha1 hashes for all the files - """ - logger.debug( - "Getting file hashes" - ) - start = dt.now() - file_hashes = {} - for stat, filename in self.files.items(): - with open(filename, 'rb') as file_binary: - file_hashes[stat] = sha1(file_binary.read()).hexdigest() - logger.debug( - "Got file hashes for %d files, took %.3es", - len(self.files), - (dt.now() - start).total_seconds() - ) - return file_hashes - - def files_changed(self): - """ - Compare hashes of files now with the ones we have cached - """ - changed_file_hashes = self.get_file_hashes() - for stat, old_hash in self.file_hashes.items(): - if changed_file_hashes[stat] != old_hash: - logger.info( - "%s statistic file %s has changed", - ''.join(self.ifos), - stat, - ) - else: - # Remove the dataset from the dictionary of hashes - del changed_file_hashes[stat] - - if changed_file_hashes == {}: - logger.debug( - "No %s statistic files have changed", - ''.join(self.ifos) - ) - - return list(changed_file_hashes.keys()) - - def check_update_files(self): - """ - Check whether files associated with the statistic need updated, - then do so for each file which needs changing - """ - files_changed = self.files_changed() - for file_key in files_changed: - self.update_file(file_key) - self.file_hashes = self.get_file_hashes() - - def update_file(self, key): - """ - Update file used in this statistic referenced by key. - """ - err_msg = "This function is a stub that should be overridden by the " - err_msg += "sub-classes. You shouldn't be seeing this error!" - raise NotImplementedError(err_msg) - def get_sngl_ranking(self, trigs): """ Returns the ranking for the single detector triggers. @@ -418,13 +351,6 @@ def __init__(self, sngl_ranking, files=None, ifos=None, if pregenerate_hist and not len(ifos) == 1: self.get_hist() - elif len(ifos) == 1: - # remove all phasetd files from self.files and self.file_hashes, - # as they are not needed - for k in list(self.files.keys()): - if 'phasetd_newsnr' in k: - del self.files[k] - del self.file_hashes[k] def get_hist(self, ifos=None): """ @@ -454,20 +380,8 @@ def get_hist(self, ifos=None): selected = name break - # If there are other phasetd_newsnr files, they aren't needed. - # So tidy them out of the self.files dictionary - rejected = [key for key in self.files.keys() - if 'phasetd_newsnr' in key and not key == selected] - for k in rejected: - del self.files[k] - del self.file_hashes[k] - if selected is None and len(ifos) > 1: raise RuntimeError("Couldn't figure out which stat file to use") - if len(ifos) == 1: - # We dont need the histogram file, but we are trying to get one - # just skip it in this case - return logger.info("Using signal histogram %s for ifos %s", selected, ifos) weights = {} @@ -581,30 +495,6 @@ def get_hist(self, ifos=None): self.has_hist = True - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. 
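The removed check_update_files/update_file pair is a template-method design: the base class iterates over changed files and dispatches to update_file, a stub that each statistic subclass overrides for the file keys it owns. In sketch form (names abbreviated):

class BaseStat:
    def check_update_files(self, changed_keys):
        for key in changed_keys:
            self.update_file(key)

    def update_file(self, key):
        raise NotImplementedError("override in subclasses")

class FitStat(BaseStat):
    def update_file(self, key):
        if key.endswith('-fit_coeffs'):
            print('reloading', key)  # stand-in for the real reload
            return True
        return False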
this statistic is inherited), they will - need updated separately - """ - if 'phasetd_newsnr' in key and not len(self.ifos) == 1: - if ''.join(sorted(self.ifos)) not in key: - logger.debug( - "%s file is not used for %s statistic", - key, - ''.join(self.ifos) - ) - return False - logger.info( - "Updating %s statistic %s file", - ''.join(self.ifos), - key - ) - # This is a PhaseTDStatistic file which needs updating - self.get_hist() - return True - return False - def logsignalrate(self, stats, shift, to_shift): """ Calculate the normalized log rate density of signals via lookup @@ -821,9 +711,7 @@ def coinc_lim_for_thresh(self, sngls_list, thresh, limifo, if not self.has_hist: self.get_hist() - fixed_statsq = sum( - [b['snglstat'] ** 2 for a, b in sngls_list if a != limifo] - ) + fixed_statsq = sum([b['snglstat'] ** 2 for a, b in sngls_list]) s1 = thresh ** 2. - fixed_statsq # Assume best case scenario and use maximum signal rate s1 -= 2. * self.hist_max @@ -864,11 +752,9 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): parsed_attrs = [f.split('-') for f in self.files.keys()] self.bg_ifos = [at[0] for at in parsed_attrs if (len(at) == 2 and at[1] == 'fit_coeffs')] - if not len(self.bg_ifos): raise RuntimeError("None of the statistic files has the required " "attribute called {ifo}-fit_coeffs !") - self.fits_by_tid = {} self.alphamax = {} for i in self.bg_ifos: @@ -923,26 +809,6 @@ def assign_fits(self, ifo): return fits_by_tid_dict - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - if key.endswith('-fit_coeffs'): - # This is a ExpFitStatistic file which needs updating - # Which ifo is it? - ifo = key[:2] - self.fits_by_tid[ifo] = self.assign_fits(ifo) - self.get_ref_vals(ifo) - logger.info( - "Updating %s statistic %s file", - ''.join(self.ifos), - key - ) - return True - return False - def get_ref_vals(self, ifo): """ Get the largest `alpha` value over all templates for given ifo. @@ -1285,18 +1151,6 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): PhaseTDStatistic.__init__(self, sngl_ranking, files=files, ifos=ifos, **kwargs) - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - # Here we inherit the PhaseTD and ExpFit file checks, - # nothing else needs doing - uf_exp_fit = ExpFitCombinedSNR.update_file(self, key) - uf_phasetd = PhaseTDStatistic.update_file(self, key) - return uf_exp_fit or uf_phasetd - def single(self, trigs): """ Calculate the necessary single detector information @@ -1445,24 +1299,6 @@ def reassign_rate(self, ifo): self.fits_by_tid[ifo]['fit_by_rate_above_thresh'] /= analysis_time self.fits_by_tid[ifo]['fit_by_rate_in_template'] /= analysis_time - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - # Check if the file to update is an ExpFit file - uf_expfit = ExpFitStatistic.update_file(self, key) - # If this has been updated we must do the reassign_rate step here - # on top of the file update from earlier - if uf_expfit: - # This is a fit coeff file which needs updating - # Which ifo is it? 
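Statistic files are keyed as '{ifo}-fit_coeffs', which is why the code above can recover detector names by splitting on '-'. A small illustration (the keys and filenames are made up):

files = {'H1-fit_coeffs': 'h1.hdf', 'L1-fit_coeffs': 'l1.hdf',
         'phasetd_newsnr_H1L1': 'pdf.hdf'}

parsed = [k.split('-') for k in files]
bg_ifos = [p[0] for p in parsed if len(p) == 2 and p[1] == 'fit_coeffs']
print(sorted(bg_ifos))  # ['H1', 'L1']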
- ifo = key[:2] - self.reassign_rate(ifo) - return True - return False - def rank_stat_coinc(self, s, slide, step, to_shift, **kwargs): # pylint:disable=unused-argument """ @@ -1563,9 +1399,12 @@ def __init__(self, sngl_ranking, files=None, ifos=None, for ifo in self.bg_ifos: self.assign_median_sigma(ifo) - self.ref_ifos = reference_ifos.split(',') - self.benchmark_logvol = None - self.assign_benchmark_logvol() + ref_ifos = reference_ifos.split(',') + + # benchmark_logvol is a benchmark sensitivity array over template id + hl_net_med_sigma = numpy.amin([self.fits_by_tid[ifo]['median_sigma'] + for ifo in ref_ifos], axis=0) + self.benchmark_logvol = 3. * numpy.log(hl_net_med_sigma) self.single_increasing = False # Initialize variable to hold event template id(s) self.curr_tnum = None @@ -1586,41 +1425,6 @@ def assign_median_sigma(self, ifo): self.fits_by_tid[ifo]['median_sigma'] = \ coeff_file['median_sigma'][:][tid_sort] - def assign_benchmark_logvol(self): - """ - Assign the benchmark log-volume used by the statistic. - This is the sensitive log-volume of each template in the - network of reference IFOs - """ - # benchmark_logvol is a benchmark sensitivity array over template id - bench_net_med_sigma = numpy.amin( - [self.fits_by_tid[ifo]['median_sigma'] for ifo in self.ref_ifos], - axis=0, - ) - self.benchmark_logvol = 3. * numpy.log(bench_net_med_sigma) - - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - # Here we inherit the PhaseTD file checks - uf_phasetd = PhaseTDStatistic.update_file(self, key) - uf_exp_fit = ExpFitBgRateStatistic.update_file(self, key) - if uf_phasetd: - # The key to update refers to a PhaseTDStatistic file - return True - if uf_exp_fit: - # The key to update refers to a ExpFitBgRateStatistic file - # In this case we must reload some statistic information - # Which ifo is it? 
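The benchmark log-volume set in the constructor above scales as distance cubed: take the limiting (minimum over the reference detectors) median sigma per template and express its cube in log form. Numerically, with made-up sigmas:

import numpy

median_sigma = {'H1': numpy.array([100., 150., 80.]),
                'L1': numpy.array([120., 140., 90.])}   # per template id

net_med_sigma = numpy.amin([median_sigma[i] for i in ('H1', 'L1')], axis=0)
benchmark_logvol = 3. * numpy.log(net_med_sigma)  # log(sigma**3) ~ log volume
print(benchmark_logvol)  # ~[13.82, 14.82, 13.15]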
- ifo = key[:2] - self.assign_median_sigma(ifo) - self.assign_benchmark_logvol() - return True - return False - def lognoiserate(self, trigs, alphabelow=6): """ Calculate the log noise rate density over single-ifo ranking @@ -1670,12 +1474,6 @@ def single(self, trigs): numpy.ndarray The array of single detector values """ - try: - # exists if accessed via coinc_findtrigs - self.curr_tnum = trigs.template_num - except AttributeError: - # exists for SingleDetTriggers & pycbc_live get_coinc - self.curr_tnum = trigs['template_id'] # single-ifo stat = log of noise rate sngl_stat = self.lognoiserate(trigs) @@ -1687,6 +1485,12 @@ def single(self, trigs): singles['end_time'] = trigs['end_time'][:] singles['sigmasq'] = trigs['sigmasq'][:] singles['snr'] = trigs['snr'][:] + try: + # exists if accessed via coinc_findtrigs + self.curr_tnum = trigs.template_num + except AttributeError: + # exists for SingleDetTriggers & pycbc_live get_coinc + self.curr_tnum = trigs['template_id'] # Store benchmark log volume as single-ifo information since the coinc # method does not have access to template id @@ -1751,6 +1555,7 @@ def rank_stat_coinc(self, s, slide, step, to_shift, ln_noise_rate = coinc_rate.combination_noise_lograte( sngl_rates, kwargs['time_addition'], kwargs['dets']) + # Extent of time-difference space occupied noise_twindow = coinc_rate.multiifo_noise_coincident_area( self.hist_ifos, kwargs['time_addition'], @@ -2129,29 +1934,6 @@ def assign_kdes(self, kname): with h5py.File(self.files[kname + '-kde_file'], 'r') as kde_file: self.kde_by_tid[kname + '_kdevals'] = kde_file['data_kde'][:] - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - # Inherit from ExpFitFgBgNormStatistic - uf_expfit = ExpFitFgBgNormStatistic.update_file(self, key) - if uf_expfit: - # The key to update refers to a ExpFitFgBgNormStatistic file - return True - # Is the key a KDE statistic file that we update here? - if key.endswith('kde_file'): - logger.info( - "Updating %s statistic %s file", - ''.join(self.ifos), - key - ) - kde_style = key.split('-')[0] - self.assign_kdes(kde_style) - return True - return False - def kde_ratio(self): """ Calculate the weighting factor according to the ratio of the @@ -2271,46 +2053,14 @@ def __init__(self, sngl_ranking, files=None, ifos=None, ifos=ifos, **kwargs) self.dq_rates_by_state = {} self.dq_bin_by_tid = {} - self.dq_state_segments = None - self.low_latency = False - self.single_dtype.append(('dq_state', int)) + self.dq_state_segments = {} for ifo in self.ifos: key = f'{ifo}-dq_stat_info' if key in self.files.keys(): self.dq_rates_by_state[ifo] = self.assign_dq_rates(key) self.dq_bin_by_tid[ifo] = self.assign_template_bins(key) - self.check_low_latency(key) - if not self.low_latency: - if self.dq_state_segments is None: - self.dq_state_segments = {} - self.dq_state_segments[ifo] = self.setup_segments(key) - - def check_low_latency(self, key): - """ - Check if the statistic file indicates low latency mode. - Parameters - ---------- - key: str - Statistic file key string. 
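The try/except moved around in this hunk is duck typing over the two trigger containers the statistic can receive: objects from coinc_findtrigs expose a template_num attribute, while dict-like triggers (SingleDetTriggers, pycbc_live) carry a 'template_id' field. As a stand-alone helper, the same logic reads:

def current_template(trigs):
    # Works for attribute-style and dict-style trigger containers alike
    try:
        return trigs.template_num      # coinc_findtrigs-style object
    except AttributeError:
        return trigs['template_id']    # SingleDetTriggers / pycbc_live dict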
- Returns - ------- - None - """ - ifo = key.split('-')[0] - with h5py.File(self.files[key], 'r') as dq_file: - ifo_grp = dq_file[ifo] - if 'dq_segments' not in ifo_grp.keys(): - # if segs are not in file, we must be in LL - if self.dq_state_segments is not None: - raise ValueError( - 'Either all dq stat files must have segments or none' - ) - self.low_latency = True - elif self.low_latency: - raise ValueError( - 'Either all dq stat files must have segments or none' - ) + self.dq_state_segments[ifo] = self.setup_segments(key) def assign_template_bins(self, key): """ @@ -2369,7 +2119,9 @@ def assign_dq_rates(self, key): def setup_segments(self, key): """ - Store segments from stat file + Check if segments definitions are in stat file + If they are, we are running offline and need to store them + If they aren't, we are running online """ ifo = key.split('-')[0] with h5py.File(self.files[key], 'r') as dq_file: @@ -2385,45 +2137,24 @@ def setup_segments(self, key): return dq_state_segs_dict - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - # Inherit from ExpFitFgBgNormStatistic - uf_expfit = ExpFitFgBgNormStatistic.update_file(self, key) - if uf_expfit: - # We have updated a ExpFitFgBgNormStatistic file already - return True - # We also need to check if the DQ files have updated - if key.endswith('dq_stat_info'): - ifo = key.split('-')[0] - logger.info( - "Updating %s statistic %s file", - ifo, - key - ) - self.dq_rates_by_state[ifo] = self.assign_dq_rates(key) - self.dq_bin_by_tid[ifo] = self.assign_template_bins(key) - return True - return False - - def find_dq_noise_rate(self, trigs): + def find_dq_noise_rate(self, trigs, dq_state): """Get dq values for a specific ifo and dq states""" + try: + tnum = trigs.template_num + except AttributeError: + tnum = trigs['template_id'] + try: ifo = trigs.ifo except AttributeError: - ifo = trigs.get('ifo', None) - if ifo is None: - ifo = self.ifos[0] - assert ifo in self.ifos + ifo = trigs['ifo'] + assert len(numpy.unique(ifo)) == 1 + # Should be exactly one ifo provided + ifo = ifo[0] - dq_state = trigs['dq_state'] - dq_val = numpy.ones(len(dq_state)) + dq_val = numpy.zeros(len(dq_state)) - tnum = self.curr_tnum if ifo in self.dq_rates_by_state: for (i, st) in enumerate(dq_state): if isinstance(tnum, numpy.ndarray): @@ -2464,34 +2195,24 @@ def lognoiserate(self, trigs): Array of log noise rate density for each input trigger. 
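check_low_latency above keys off whether segment data was ever written into the statistic file, which is a simple HDF5 group-membership test. A minimal probe, assuming a hypothetical file layout of one group per detector:

import h5py

def has_dq_segments(filename, ifo):
    # True if '{ifo}/dq_segments' exists => offline-style stat file
    with h5py.File(filename, 'r') as f:
        return 'dq_segments' in f[ifo]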
""" - dq_rate = self.find_dq_noise_rate(trigs) - dq_rate = numpy.maximum(dq_rate, 1) - - logr_n = ExpFitFgBgNormStatistic.lognoiserate( - self, trigs) - logr_n += numpy.log(dq_rate) - return logr_n - - def single(self, trigs): # make sure every trig has a dq state + try: ifo = trigs.ifo except AttributeError: - ifo = trigs.get('ifo', None) - if ifo is None: - ifo = self.ifos[0] - assert ifo in self.ifos + ifo = trigs['ifo'] + assert len(numpy.unique(ifo)) == 1 + # Should be exactly one ifo provided + ifo = ifo[0] - singles = ExpFitFgBgNormStatistic.single(self, trigs) + dq_state = self.find_dq_state_by_time(ifo, trigs['end_time'][:]) + dq_rate = self.find_dq_noise_rate(trigs, dq_state) + dq_rate = numpy.maximum(dq_rate, 1) - if self.low_latency: - # trigs should already have a dq state assigned - singles['dq_state'] = trigs['dq_state'][:] - else: - singles['dq_state'] = self.find_dq_state_by_time( - ifo, trigs['end_time'][:] - ) - return singles + logr_n = ExpFitFgBgNormStatistic.lognoiserate( + self, trigs) + logr_n += numpy.log(dq_rate) + return logr_n class DQExpFitFgBgKDEStatistic(DQExpFitFgBgNormStatistic): @@ -2524,17 +2245,6 @@ def __init__(self, sngl_ranking, files=None, ifos=None, **kwargs): for kname in self.kde_names: ExpFitFgBgKDEStatistic.assign_kdes(self, kname) - def update_file(self, key): - """ - Update file used in this statistic. - If others are used (i.e. this statistic is inherited), they will - need updated separately - """ - # Inherit from DQExpFitFgBgNormStatistic and ExpFitFgBgKDEStatistic - uf_dq = DQExpFitFgBgNormStatistic.update_file(self, key) - uf_kde = ExpFitFgBgKDEStatistic.update_file(self, key) - return uf_dq or uf_kde - def kde_ratio(self): """ Inherited, see docstring for ExpFitFgBgKDEStatistic.kde_signalrate diff --git a/pycbc/frame/frame.py b/pycbc/frame/frame.py index bea7386418a..a67a3d090d9 100644 --- a/pycbc/frame/frame.py +++ b/pycbc/frame/frame.py @@ -896,8 +896,8 @@ def __init__(self, frame_src, force_update_cache=force_update_cache, increment_update_cache=increment_update_cache) - def flag_at_times(self, start_time, duration, times, padding=0): - """ Check whether the idq flag was on at given times + def indices_of_flag(self, start_time, duration, times, padding=0): + """ Return the indices of the times lying in the flagged region Parameters ---------- @@ -905,45 +905,32 @@ def flag_at_times(self, start_time, duration, times, padding=0): Beginning time to request for duration: int Number of seconds to check. - times: array of floats - Times to check for an active flag padding: float - Amount of time in seconds to flag around samples - below the iDQ FAP threshold + Number of seconds to add around flag inactive times to be considered + inactive as well. Returns ------- - flag_state: numpy.ndarray - Boolean array of whether flag was on at given times + indices: numpy.ndarray + Array of indices marking the location of triggers within valid + time. 
""" - from pycbc.events.veto import indices_within_times - - # convert start and end times to buffer indices + from pycbc.events.veto import indices_outside_times sr = self.idq.raw_buffer.sample_rate s = int((start_time - self.idq.raw_buffer.start_time - padding) * sr) - 1 e = s + int((duration + padding) * sr) + 1 - - # find samples when iDQ FAP is below threshold and state is valid idq_fap = self.idq.raw_buffer[s:e] + stamps = idq_fap.sample_times.numpy() low_fap = idq_fap.numpy() <= self.threshold idq_valid = self.idq_state.raw_buffer[s:e] idq_valid = idq_valid.numpy().astype(bool) valid_low_fap = numpy.logical_and(idq_valid, low_fap) - - # find times corresponding to the valid low FAP samples glitch_idx = numpy.flatnonzero(valid_low_fap) - stamps = idq_fap.sample_times.numpy() glitch_times = stamps[glitch_idx] - - # construct start and end times of flag segments starts = glitch_times - padding ends = starts + 1.0 / sr + padding * 2.0 - - # check if times were flagged - idx = indices_within_times(times, starts, ends) - flagged_bool = numpy.zeros(len(times), dtype=bool) - flagged_bool[idx] = True - return flagged_bool + idx = indices_outside_times(times, starts, ends) + return idx def advance(self, blocksize): """ Add blocksize seconds more to the buffer, push blocksize seconds diff --git a/pycbc/inference/models/marginalized_gaussian_noise.py b/pycbc/inference/models/marginalized_gaussian_noise.py index 9052a5018ed..0a1342d0432 100644 --- a/pycbc/inference/models/marginalized_gaussian_noise.py +++ b/pycbc/inference/models/marginalized_gaussian_noise.py @@ -19,7 +19,6 @@ """ import itertools -import logging import numpy from scipy import special @@ -208,10 +207,8 @@ class MarginalizedTime(DistMarg, BaseGaussianNoise): def __init__(self, variable_params, data, low_frequency_cutoff, psds=None, high_frequency_cutoff=None, normalize=False, - sample_rate=None, **kwargs): - self.sample_rate = float(sample_rate) self.kwargs = kwargs variable_params, kwargs = self.setup_marginalization( variable_params, @@ -244,14 +241,6 @@ def __init__(self, variable_params, self.dets = {} - if sample_rate is not None: - for ifo in self.data: - if self.sample_rate < self.data[ifo].sample_rate: - raise ValueError("Model sample rate was set less than the" - " data. ") - logging.info("Using %s sample rate for marginalization", - sample_rate) - def _nowaveform_loglr(self): """Convenience function to set loglr values if no waveform generated. 
""" @@ -307,15 +296,8 @@ def _loglr(self): hp[self._kmin[det]:kmax] *= self._weight[det][slc] hc[self._kmin[det]:kmax] *= self._weight[det][slc] - # Use a higher sample rate if requested - if self.sample_rate is not None: - tlen = int(round(self.sample_rate * - self.whitened_data[det].duration)) - flen = tlen // 2 + 1 - hp.resize(flen) - hc.resize(flen) - self._whitened_data[det].resize(flen) - + hp.resize(len(self._whitened_data[det])) + hc.resize(len(self._whitened_data[det])) cplx_hpd[det], _, _ = matched_filter_core( hp, self._whitened_data[det], @@ -343,20 +325,15 @@ def _loglr(self): for det in wfs: if det not in self.dets: self.dets[det] = Detector(det) - - if self.precalc_antenna_factors: - fp, fc, dt = self.get_precalc_antenna_factors(det) - else: - fp, fc = self.dets[det].antenna_pattern( - params['ra'], - params['dec'], - params['polarization'], - params['tc']) - dt = self.dets[det].time_delay_from_earth_center(params['ra'], - params['dec'], - params['tc']) + fp, fc = self.dets[det].antenna_pattern( + params['ra'], + params['dec'], + params['polarization'], + params['tc']) + dt = self.dets[det].time_delay_from_earth_center(params['ra'], + params['dec'], + params['tc']) dtc = params['tc'] + dt - cplx_hd = fp * cplx_hpd[det].at_time(dtc, interpolate='quadratic') cplx_hd += fc * cplx_hcd[det].at_time(dtc, diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py index 15911d545c9..b89681ecc9f 100644 --- a/pycbc/io/hdf.py +++ b/pycbc/io/hdf.py @@ -28,40 +28,7 @@ logger = logging.getLogger('pycbc.io.hdf') -class HGroup(h5py.Group): - """ Low level extensions to the h5py group object - """ - def create_group(self, name, track_order=None): - """ - Wrapper around h5py's create_group in order to redirect to the - manual HGroup object defined here - """ - if track_order is None: - track_order = h5py.h5.get_config().track_order - - with h5py._objects.phil: - name, lcpl = self._e(name, lcpl=True) - gcpl = HGroup._gcpl_crt_order if track_order else None - gid = h5py.h5g.create(self.id, name, lcpl=lcpl, gcpl=gcpl) - return HGroup(gid) - - def create_dataset(self, name, shape=None, dtype=None, data=None, **kwds): - """ - Wrapper around h5py's create_dataset so that checksums are used - """ - if hasattr(data, 'dtype') and not data.dtype == object: - kwds['fletcher32'] = True - return h5py.Group.create_dataset( - self, - name, - shape=shape, - dtype=dtype, - data=data, - **kwds - ) - - -class HFile(HGroup, h5py.File): +class HFile(h5py.File): """ Low level extensions to the capabilities of reading an hdf5 File """ def select(self, fcn, *args, chunksize=10**6, derived=None, group='', @@ -659,7 +626,6 @@ def trig_dict(self): mtrigs[k] = self.trigs[k][self.mask] else: mtrigs[k] = self.trigs[k][:] - mtrigs['ifo'] = self.ifo return mtrigs @classmethod @@ -721,35 +687,30 @@ def and_masks(self, logic_mask): self.mask[and_indices.astype(np.uint64)] = True def mask_to_n_loudest_clustered_events(self, rank_method, - statistic_threshold=None, + ranking_threshold=6, n_loudest=10, - cluster_window=10, - statistic_kwargs=None): + cluster_window=10): """Edits the mask property of the class to point to the N loudest single detector events as ranked by ranking statistic. Events are clustered so that no more than 1 event within +/- cluster_window will be considered. 
Can apply a threshold on the - statistic using statistic_threshold + ranking using ranking_threshold """ - if statistic_kwargs is None: - statistic_kwargs = {} sds = rank_method.single(self.trig_dict()) - stat = rank_method.rank_stat_single( - (self.ifo, sds), - **statistic_kwargs - ) + stat = rank_method.rank_stat_single((self.ifo, sds)) if len(stat) == 0: # No triggers at all, so just return here self.apply_mask(np.array([], dtype=np.uint64)) - self.stat = np.array([], dtype=np.uint64) return times = self.end_time - if statistic_threshold is not None: - # Threshold on statistic - keep = stat >= statistic_threshold + if ranking_threshold: + # Threshold on sngl_ranking + # Note that we can provide None or zero to do no thresholding + # but the default is to do some + keep = stat >= ranking_threshold stat = stat[keep] times = times[keep] self.apply_mask(keep) @@ -783,7 +744,6 @@ def mask_to_n_loudest_clustered_events(self, rank_method, index.sort() # Apply to the existing mask self.apply_mask(index) - self.stat = stat[index] @property def mask_size(self): diff --git a/pycbc/live/__init__.py b/pycbc/live/__init__.py index 5a3a40c901a..4037f6634ae 100644 --- a/pycbc/live/__init__.py +++ b/pycbc/live/__init__.py @@ -4,4 +4,3 @@ from .snr_optimizer import * from .significance_fits import * -from .supervision import * diff --git a/pycbc/live/supervision.py b/pycbc/live/supervision.py deleted file mode 100644 index 858dc6c782b..00000000000 --- a/pycbc/live/supervision.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright (C) 2023 Arthur Tolley, Gareth Cabourn Davies -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - -""" -This module contains functions for supervising codes to run regularly -during pycbc_live production, taking input from the search and returning -files which can be used in the search. -This module is primarily used in the pycbc_live_supervise_* programs. -""" - -import logging -import subprocess -import time -import os -from datetime import datetime -from dateutil.relativedelta import relativedelta - -import pycbc - -logger = logging.getLogger('pycbc.live.supervision') - - -def symlink(target, link_name): - """ - Create a symbolic link replacing the destination and checking for - errors. 
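mask_to_n_loudest_clustered_events above does three things: threshold on the ranking, cluster so at most one event survives per +/- cluster_window, then keep the N loudest. A greedy numpy sketch of the cluster-then-truncate core (a simplification, not PyCBC's exact binning scheme), assuming times and stat are aligned arrays:

import numpy

def n_loudest_clustered(times, stat, n_loudest=10, window=10):
    order = stat.argsort()[::-1]           # loudest first
    keep = []
    for i in order:
        # reject events within +/- window of an already-kept louder one
        if all(abs(times[i] - times[j]) > window for j in keep):
            keep.append(i)
        if len(keep) == n_loudest:
            break
    return numpy.sort(numpy.array(keep, dtype=int))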
- """ - # Ensure that the target and link name are absolute paths - target = os.path.abspath(target) - link_name = os.path.abspath(link_name) - logger.info("Linking %s to %s", target, link_name) - try: - subprocess.run(['ln', '-sf', target, link_name], check=True) - except subprocess.CalledProcessError as sub_err: - logging.error("Could not link %s to %s", target, link_name) - raise sub_err - - -def dict_to_args(opts_dict): - """ - Convert an option dictionary into a list to be used by subprocess.run - """ - dargs = [] - for option, value in opts_dict.items(): - dargs.append('--' + option.strip()) - if value == '': - # option is a flag, do nothing - continue - if len(value.split()) > 1: - # value is a list, append individually - for v in value.split(): - dargs.append(v) - else: - # Single value option - append once - dargs.append(value) - return dargs - - -def mail_volunteers_error(controls, mail_body_lines, subject): - """ - Email a list of people, defined by mail-volunteers-file - To be used for errors or unusual occurences - """ - with open(controls['mail-volunteers-file'], 'r') as mail_volunteers_file: - volunteers = [volunteer.strip() for volunteer in - mail_volunteers_file.readlines()] - logger.info("Emailing %s with warnings", ' '.join(volunteers)) - mail_command = [ - 'mail', - '-s', - subject - ] - mail_command += volunteers - mail_body = '\n'.join(mail_body_lines) - try: - subprocess.run(mail_command, input=mail_body, text=True, check=True) - except subprocess.CalledProcessError as sub_err: - logging.error("Could not send mail on error") - raise sub_err - - -def run_and_error(command_arguments, controls): - """ - Wrapper around subprocess.run to catch errors and send emails if required - """ - logger.info("Running %s", " ".join(command_arguments)) - command_output = subprocess.run( - command_arguments, - capture_output=True - ) - - if command_output.returncode: - error_contents = [' '.join(command_arguments), '\n', - command_output.stderr.decode()] - if 'mail-volunteers-file' in controls: - mail_volunteers_error( - controls, - error_contents, - f"PyCBC live could not run {command_arguments[0]}" - ) - err_msg = f"Could not run {command_arguments[0]}:\n" - err_msg += ' '.join(error_contents) - raise subprocess.SubprocessError(err_msg) - - -def wait_for_utc_time(target_str): - """Wait until the UTC time is as given by `target_str`, in HH:MM:SS format. - """ - target_hour, target_minute, target_second = map(int, target_str.split(':')) - now = datetime.utcnow() - # for today's target, take now and replace the time - target_today = now + relativedelta( - hour=target_hour, minute=target_minute, second=target_second - ) - # for tomorrow's target, take now, add one day, and replace the time - target_tomorrow = now + relativedelta( - days=1, hour=target_hour, minute=target_minute, second=target_second - ) - next_target = target_today if now <= target_today else target_tomorrow - sleep_seconds = (next_target - now).total_seconds() - logger.info('Waiting %.0f s', sleep_seconds) - time.sleep(sleep_seconds) - - -def ensure_directories(control_values, day_str): - """ - Ensure that the required directories exist - """ - output_dir = os.path.join( - control_values['output-directory'], - day_str - ) - pycbc.makedir(output_dir) - if 'public-dir' in control_values: - # The public directory wil be in subdirectories for the year, month, - # day, e.g. 2024_04_12 will be in 2024/04/12. 
- public_dir = os.path.join( - control_values['public-dir'], - *day_str.split('_') - ) - pycbc.makedir(public_dir) diff --git a/pycbc/pool.py b/pycbc/pool.py index a770b9537ec..e3606b151cb 100644 --- a/pycbc/pool.py +++ b/pycbc/pool.py @@ -91,7 +91,7 @@ def allmap(self, fcn, args): return results def map(self, func, items, chunksize=None): - """ Catch keyboard interrupts to allow the pool to exit cleanly. + """ Catch keyboard interuppts to allow the pool to exit cleanly. Parameters ---------- @@ -113,13 +113,6 @@ def map(self, func, items, chunksize=None): self.join() raise KeyboardInterrupt - def close_pool(self): - """ Close the pool and remove the reference - """ - self.close() - self.join() - atexit.unregister(_shutdown_pool) - def _dummy_broadcast(self, f, args): self.map(f, [args] * self.size) @@ -137,11 +130,6 @@ def map(self, f, items): imap = map imap_unordered = map - def close_pool(self): - ''' Dummy function to be consistent with BroadcastPool - ''' - pass - def use_mpi(require_mpi=False, log=True): """ Get whether MPI is enabled and if so the current size and rank """ diff --git a/pycbc/results/dq.py b/pycbc/results/dq.py index 7be7bb1d4ff..ce3aeb43b8f 100644 --- a/pycbc/results/dq.py +++ b/pycbc/results/dq.py @@ -22,7 +22,8 @@ """ -data_h1_string = """ +data_h1_string = """H1 +  Summary   @@ -30,7 +31,8 @@ 'https://alog.ligo-wa.caltech.edu/aLOG/includes/search.php?adminType=search'); return true;">aLOG""" -data_l1_string=""" +data_l1_string="""L1 +  Summary   diff --git a/pycbc/results/pygrb_postprocessing_utils.py b/pycbc/results/pygrb_postprocessing_utils.py index 99e562f1df5..638cee6fe14 100644 --- a/pycbc/results/pygrb_postprocessing_utils.py +++ b/pycbc/results/pygrb_postprocessing_utils.py @@ -56,13 +56,14 @@ # * Add to the parser object the arguments used for BestNR calculation # * Add to the parser object the arguments for found/missed injection files # ============================================================================= -def pygrb_initialize_plot_parser(description=None): +def pygrb_initialize_plot_parser(description=None, version=None): """Sets up a basic argument parser object for PyGRB plotting scripts""" formatter_class = argparse.ArgumentDefaultsHelpFormatter parser = argparse.ArgumentParser(description=description, formatter_class=formatter_class) add_common_pycbc_options(parser) + parser.add_argument("--version", action="version", version=version) parser.add_argument("-o", "--output-file", default=None, help="Output file.") parser.add_argument("--x-lims", action="store", default=None, @@ -92,27 +93,8 @@ def pygrb_initialize_plot_parser(description=None): parser.add_argument('--plot-caption', default=None, help="If provided, use the given string as the plot " + "caption") - return parser - - -def pygrb_add_slide_opts(parser): - """Add to parser object arguments related to short timeslides""" - parser.add_argument("--slide-id", type=str, default='0', - help="If all, the plotting scripts will use triggers" + - "from all short slides.") - -def slide_opts_helper(args): - """ - This function overwrites the types of input slide_id information - when loading data in postprocessing scripts. 
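The pool.map override touched in this hunk exists so that Ctrl-C shuts worker pools down cleanly instead of leaving zombie workers: catch KeyboardInterrupt, tear the pool down, re-raise. The bare pattern (a sketch, not PyCBC's exact class):

from multiprocessing import Pool

def interruptible_map(pool, func, items):
    try:
        return pool.map(func, items)
    except KeyboardInterrupt:
        pool.terminate()   # stop the workers
        pool.join()        # then reap them
        raise

if __name__ == '__main__':
    with Pool(2) as pool:
        print(interruptible_map(pool, abs, [-1, -2, 3]))  # [1, 2, 3]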
- """ - if args.slide_id.isdigit(): - args.slide_id = int(args.slide_id) - elif args.slide_id.lower() == "all": - args.slide_id = None - else: - raise ValueError("--slide-id must be the string all or an int") + return parser def pygrb_add_injmc_opts(parser): @@ -194,20 +176,6 @@ def pygrb_add_bestnr_cut_opt(parser): "Default 0: all events are considered.") -# ============================================================================= -# Wrapper to pick triggers with certain slide_ids -# ============================================================================= -def slide_filter(trig_file, data, slide_id=None): - """ - This function adds the capability to select triggers with specific - slide_ids during the postprocessing stage of PyGRB. - """ - if slide_id is None: - return data - mask = numpy.where(trig_file['network/slide_id'][:] == slide_id)[0] - return data[mask] - - # ============================================================================= # Wrapper to read segments files # ============================================================================= @@ -392,8 +360,7 @@ def dataset_iterator(g, prefix=''): yield from dataset_iterator(item, path) -def load_triggers(input_file, ifos, vetoes, rw_snr_threshold=None, - slide_id=None): +def load_triggers(input_file, ifos, vetoes, rw_snr_threshold=None): """Loads triggers from PyGRB output file, returning a dictionary""" trigs = HFile(input_file, 'r') @@ -444,10 +411,6 @@ def load_triggers(input_file, ifos, vetoes, rw_snr_threshold=None, else: trigs_dict[path] = dset[above_thresh] - if trigs_dict[path].size == trigs['network/slide_id'][:].size: - trigs_dict[path] = slide_filter(trigs, trigs_dict[path], - slide_id=slide_id) - return trigs_dict diff --git a/pycbc/results/scatter_histograms.py b/pycbc/results/scatter_histograms.py index dac56ab79cc..f89cc5a563f 100644 --- a/pycbc/results/scatter_histograms.py +++ b/pycbc/results/scatter_histograms.py @@ -43,7 +43,7 @@ if 'matplotlib.backends' not in sys.modules: # nopep8 matplotlib.use('agg') -from matplotlib import (offsetbox, pyplot, gridspec, colors) +from matplotlib import (offsetbox, pyplot, gridspec) from pycbc.results import str_utils from pycbc.io import FieldArray @@ -337,7 +337,7 @@ def create_marginalized_hist(ax, values, label, percentiles=None, linestyle='-', plot_marginal_lines=True, title=True, expected_value=None, expected_color='red', rotated=False, - plot_min=None, plot_max=None, log_scale=False): + plot_min=None, plot_max=None): """Plots a 1D marginalized histogram of the given param from the given samples. @@ -380,8 +380,6 @@ def create_marginalized_hist(ax, values, label, percentiles=None, creates. scalefac : {1., float} Factor to scale the default font sizes by. Default is 1 (no scaling). 
- log_scale : boolean - Should the histogram bins be logarithmically spaced """ if fillcolor is None: htype = 'step' @@ -391,19 +389,7 @@ def create_marginalized_hist(ax, values, label, percentiles=None, orientation = 'horizontal' else: orientation = 'vertical' - if log_scale: - bins = numpy.logspace( - numpy.log10(numpy.nanmin(values)), - numpy.log10(numpy.nanmax(values)), - 50 - ) - else: - bins = numpy.linspace( - numpy.nanmin(values), - numpy.nanmax(values), - 50, - ) - ax.hist(values, bins=bins, histtype=htype, orientation=orientation, + ax.hist(values, bins=50, histtype=htype, orientation=orientation, facecolor=fillcolor, edgecolor=color, ls=linestyle, lw=2, density=True) if percentiles is None: @@ -559,7 +545,6 @@ def create_multidim_plot(parameters, samples, labels=None, marginal_title=True, marginal_linestyle='-', zvals=None, show_colorbar=True, cbar_label=None, vmin=None, vmax=None, scatter_cmap='plasma', - scatter_log_cmap=False, log_parameters=None, plot_density=False, plot_contours=True, density_cmap='viridis', contour_color=None, label_contours=True, @@ -629,10 +614,6 @@ def create_multidim_plot(parameters, samples, labels=None, zvals. scatter_cmap : {'plasma', string} The color map to use for the scatter points. Default is 'plasma'. - scatter_log_cmap : boolean - Should the scatter point coloring be on a log scale? Default False - log_parameters : list or None - Which parameters should be plotted on a log scale plot_density : {False, bool} Plot the density of points as a color map. plot_contours : {True, bool} @@ -668,8 +649,6 @@ def create_multidim_plot(parameters, samples, labels=None, """ if labels is None: labels = {p: p for p in parameters} - if log_parameters is None: - log_parameters = [] # set up the figure with a grid of axes # if only plotting 2 parameters, make the marginal plots smaller nparams = len(parameters) @@ -753,7 +732,6 @@ def create_multidim_plot(parameters, samples, labels=None, create_marginalized_hist( ax, samples[param], label=labels[param], color=hist_color, fillcolor=fill_color, - log_scale=param in log_parameters, plot_marginal_lines=plot_marginal_lines, linestyle=marginal_linestyle, linecolor=line_color, title=marginal_title, expected_value=expected_value, @@ -771,13 +749,8 @@ def create_multidim_plot(parameters, samples, labels=None, alpha = 0.3 else: alpha = 1. 
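The log-scale plotting support being removed from this file has two independent pieces: histogram bins built with numpy.logspace instead of numpy.linspace, and a matplotlib LogNorm color scale for the scatter points (removed in the next hunk). Both in sketch form, with random stand-in data:

import numpy
from matplotlib import colors, pyplot

values = numpy.random.lognormal(size=1000)
# linearly vs. logarithmically spaced histogram bins
lin_bins = numpy.linspace(numpy.nanmin(values), numpy.nanmax(values), 50)
log_bins = numpy.logspace(numpy.log10(numpy.nanmin(values)),
                          numpy.log10(numpy.nanmax(values)), 50)
pyplot.hist(values, bins=log_bins, histtype='step')

# linear vs. logarithmic color scale for a scatter plot
z = values[:200]
norm = colors.LogNorm(vmin=z.min(), vmax=z.max())
pyplot.scatter(numpy.random.rand(200), numpy.random.rand(200),
               c=z, s=5, norm=norm, cmap='plasma', edgecolors='none')
pyplot.savefig('example.png')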
- if scatter_log_cmap: - cmap_norm = colors.LogNorm(vmin=vmin, vmax=vmax) - else: - cmap_norm = colors.Normalize(vmin=vmin, vmax=vmax) - plt = ax.scatter(x=samples[px], y=samples[py], c=zvals, s=5, - edgecolors='none', norm=cmap_norm, + edgecolors='none', vmin=vmin, vmax=vmax, cmap=scatter_cmap, alpha=alpha, zorder=2) if plot_contours or plot_density: diff --git a/pycbc/results/static/css/pycbc/orange.css b/pycbc/results/static/css/pycbc/orange.css index 1674f8ae1c2..8b2b44b0aea 100644 --- a/pycbc/results/static/css/pycbc/orange.css +++ b/pycbc/results/static/css/pycbc/orange.css @@ -92,17 +92,3 @@ font-size:16px; a { color:#000000; } - -table { - display: block; - overflow-x: auto; - white-space: nowrap; -} - -td { - text-align: center; -} - -th { - text-align: center; -} diff --git a/pycbc/results/table_utils.py b/pycbc/results/table_utils.py index aec7c62ffdb..dfdaa7297d2 100644 --- a/pycbc/results/table_utils.py +++ b/pycbc/results/table_utils.py @@ -23,10 +23,7 @@ # """ This module provides functions to generate sortable html tables """ -import mako.template -import uuid -import copy -import numpy +import mako.template, uuid google_table_template = mako.template.Template(""" @@ -106,89 +103,42 @@ def html_table(columns, names, page_size=None, format_strings=None): static_table_template = mako.template.Template(""" - % for row in range(n_rows): - % if titles is not None: - - % if row_labels is not None: - - % endif - % for i in range(n_columns): - - % endfor - - % endif - - % for i in range(len(data)): - - % if row_labels is not None: - - % endif - % for j in range(n_columns): - - % endfor - + % if titles is not None: + + % for i in range(len(titles)): + % endfor + + % endif + + % for i in range(len(data)): + + % for j in range(len(data[i])): + + % endfor + % endfor
-                <th>
-                    ${titles[row * n_columns + i]}
-                </th>
-                <td>
-                    ${row_labels[i]}
-                </td>
-                <td>
-                    ${data[i][row * n_columns + j]}
-                </td>
+                <th>
+                    ${titles[i]}
+                </th>
+                <td>
+                    ${data[i][j]}
+                </td>
""") -def static_table(data, titles=None, columns_max=None, row_labels=None): - """ Return an html table of this data +def static_table(data, titles=None): + """ Return an html tableo of this data Parameters ---------- - data : two-dimensional string array + data : two-dimensional numpy string array Array containing the cell values titles : numpy array - Vector str of titles, must be the same length as data - columns_max : integer or None - If given, will restrict the number of columns in the table - row_labels : list of strings - Optional list of row labels to be given as the first cell in - each data row. Does not count towards columns_max + Vector str of titles Returns ------- html_table : str A string containing the html table. """ - data = copy.deepcopy(data) - titles = copy.deepcopy(titles) - row_labels = copy.deepcopy(row_labels) - drows, dcols = numpy.array(data).shape - if titles is not None and not len(titles) == dcols: - raise ValueError("titles and data lengths do not match") - - if row_labels is not None and not len(row_labels) == drows: - raise ValueError( - "row_labels must be the same number of rows supplied to data" - ) - - if columns_max is not None: - n_rows = int(numpy.ceil(len(data[0]) / columns_max)) - n_columns = min(columns_max, len(data[0])) - if len(data[0]) < n_rows * n_columns: - # Pad the data and titles with empty strings - n_missing = int(n_rows * n_columns - len(data[0])) - data = numpy.hstack((data, numpy.zeros((len(data), n_missing), dtype='U1'))) - if titles is not None: - titles += [' '] * n_missing - else: - n_rows = 1 - n_columns = len(data[0]) - - return static_table_template.render( - data=data, - titles=titles, - n_columns=n_columns, - n_rows=n_rows, - row_labels=row_labels, - ) + return static_table_template.render(data=data, titles=titles) diff --git a/pycbc/types/config.py b/pycbc/types/config.py index cdbf67bff95..c690c8f4906 100644 --- a/pycbc/types/config.py +++ b/pycbc/types/config.py @@ -49,7 +49,7 @@ def __deepcopy__(self, memo): self.write(config_string) config_string.seek(0) new_config = self.__class__() - new_config.read_file(config_string) + new_config.readfp(config_string) return new_config diff --git a/pycbc/types/timeseries.py b/pycbc/types/timeseries.py index 3405d8d0942..a004404e125 100644 --- a/pycbc/types/timeseries.py +++ b/pycbc/types/timeseries.py @@ -243,13 +243,10 @@ def get_sample_times(self): def at_time(self, time, nearest_sample=False, interpolate=None, extrapolate=None): - """Return the value of the TimeSeries at the specified GPS time. + """ Return the value at the specified gps time Parameters ---------- - time: scalar or array-like - GPS time at which the value is wanted. Note that LIGOTimeGPS - objects count as scalar. nearest_sample: bool Return the sample at the time nearest to the chosen time rather than rounded down. @@ -257,7 +254,7 @@ def at_time(self, time, nearest_sample=False, Return the interpolated value of the time series. Choices are simple linear or quadratic interpolation. extrapolate: str or float, None - Value to return if time is outside the range of the vector or + Value to return if time is outsidde the range of the vector or method of extrapolating the value. 
""" if nearest_sample: @@ -281,9 +278,9 @@ def at_time(self, time, nearest_sample=False, keep_idx = _numpy.where(left & right)[0] vtime = vtime[keep_idx] else: - raise ValueError(f"Unsupported extrapolate: {extrapolate}") + raise ValueError("Unsuported extrapolate: %s" % extrapolate) - fi = (vtime - float(self.start_time)) * self.sample_rate + fi = (vtime - float(self.start_time))*self.sample_rate i = _numpy.asarray(_numpy.floor(fi)).astype(int) di = fi - i @@ -308,9 +305,10 @@ def at_time(self, time, nearest_sample=False, ans[keep_idx] = old ans = _numpy.array(ans, ndmin=1) - if _numpy.ndim(time) == 0: + if _numpy.isscalar(time): return ans[0] - return ans + else: + return ans at_times = at_time @@ -762,11 +760,10 @@ def qtransform(self, delta_t=None, delta_f=None, logfsteps=None, # Interpolate if requested if delta_f or delta_t or logfsteps: if return_complex: - interp_amp = interp2d(freqs, times, abs(q_plane), kx=1, ky=1) - interp_phase = interp2d(freqs, times, _numpy.angle(q_plane), - kx=1, ky=1) + interp_amp = interp2d(times, freqs, abs(q_plane.T)) + interp_phase = interp2d(times, freqs, _numpy.angle(q_plane.T)) else: - interp = interp2d(freqs, times, q_plane, kx=1, ky=1) + interp = interp2d(times, freqs, q_plane.T) if delta_t: times = _numpy.arange(float(self.start_time), @@ -780,12 +777,12 @@ def qtransform(self, delta_t=None, delta_f=None, logfsteps=None, if delta_f or delta_t or logfsteps: if return_complex: - q_plane = _numpy.exp(1.0j * interp_phase(freqs, times)) - q_plane *= interp_amp(freqs, times) + q_plane = _numpy.exp(1.0j * interp_phase(times, freqs)) + q_plane *= interp_amp(times, freqs) else: - q_plane = interp(freqs, times) + q_plane = interp(times, freqs) - return times, freqs, q_plane + return times, freqs, q_plane.T def notch_fir(self, f1, f2, order, beta=5.0, remove_corrupted=True): """ notch filter the time series using an FIR filtered generated from diff --git a/pyproject.toml b/pyproject.toml index 1fcf4c3ca7d..af1b3aa113d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,16 @@ [build-system] requires = ["setuptools", "wheel", - "cython>=0.29.21", - "numpy>=2.0.0", + "cython>=3.0.0", + "numpy==1.16.0; python_version <= '3.7'", + "numpy==1.17.3; python_version == '3.8'", + "numpy==1.19.3; python_version == '3.9'", + "numpy==1.21.4; python_version =='3.10'", + "numpy==1.23.4; python_version=='3.11'", + "numpy; python_version >= '3.12'", ] + +# To ensure the best compatibility, try to match the numpy reqs +# where possible to the following used by scipy and the minimum +# of our normal numpy requirements in setup.py +# https://github.com/scipy/oldest-supported-numpy/blob/master/setup.cfg diff --git a/requirements-igwn.txt b/requirements-igwn.txt index f6199d03283..3b7e2d2ce84 100644 --- a/requirements-igwn.txt +++ b/requirements-igwn.txt @@ -1,6 +1,6 @@ # For LDG service access -ligo-proxy-utils ciecplib[kerberos] >= 0.7.0 dqsegdb2 >= 1.1.4 amqplib htchirp >= 2.0 +dqsegdb >= 2.0.0 diff --git a/requirements.txt b/requirements.txt index 3128f18f0a3..25f99b30322 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,17 +17,9 @@ tqdm gwdatafind>=1.1.3 # Requirements for full pegasus env -# https://pegasus.isi.edu/documentation/user-guide/installation.html#mixing-environments-system-venv-conda -# six is listed, but is now not needed. 
-pegasus-wms.api >= 5.0.8 -boto3 -certifi +pegasus-wms.api >= 5.0.6 +# Need GitPython: See discussion in https://github.com/gwastro/pycbc/pull/4454 GitPython -pyjwt -pyyaml -s3transfer -urllib3 - # need to pin until pegasus for further upstream # addresses incompatibility between old flask/jinja2 and latest markupsafe markupsafe <= 2.0.1 diff --git a/setup.py b/setup.py index 03e2dbf8e9b..d5f769271c9 100755 --- a/setup.py +++ b/setup.py @@ -20,9 +20,11 @@ """ import sys -import os, subprocess +import os, subprocess, shutil import platform +from distutils.command.clean import clean as _clean + from setuptools import Extension, setup, Command from setuptools.command.build_ext import build_ext as _build_ext from setuptools import find_packages @@ -45,7 +47,7 @@ 'tqdm', 'setuptools', 'gwdatafind', - 'pegasus-wms.api >= 5.0.8', + 'pegasus-wms.api >= 5.0.6', 'python-ligo-lw >= 1.7.0', 'ligo-segments', 'lalsuite!=7.2', @@ -81,6 +83,27 @@ def run(self): _build_ext.run(self) + +# Add swig-generated files to the list of things to clean, so they +# get regenerated each time. +class clean(_clean): + def finalize_options (self): + _clean.finalize_options(self) + self.clean_files = [] + self.clean_folders = ['docs/_build'] + def run(self): + _clean.run(self) + for f in self.clean_files: + try: + os.unlink(f) + print('removed ' + f) + except: + pass + + for fol in self.clean_folders: + shutil.rmtree(fol, ignore_errors=True) + print('removed ' + fol) + def get_version_info(): """Get VCS info and write version info to version.py. """ @@ -95,7 +118,7 @@ def __getattr__(self, attr): vinfo = _version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '2.5.dev2' + vinfo.version = '2.4.dev3' vinfo.release = 'False' version_script = f"""# coding: utf-8 @@ -169,6 +192,7 @@ def run(self): cmdclass = { 'build_docs': build_docs, 'build_gh_pages': build_gh_pages, + 'clean': clean, 'build_ext': cbuild_ext } @@ -185,21 +209,12 @@ def run(self): # do the actual work of building the package VERSION = get_version_info() -cythonext = ['pycbc.waveform.spa_tmplt_cpu', - 'pycbc.waveform.utils_cpu', - 'pycbc.types.array_cpu', - 'pycbc.filter.matchedfilter_cpu', - 'pycbc.vetoes.chisq_cpu', - "pycbc.fft.fftw_pruned_cython", - "pycbc.events.eventmgr_cython", - "pycbc.events.simd_threshold_cython", - "pycbc.filter.simd_correlate_cython", - "pycbc.waveform.decompress_cpu_cython", - "pycbc.inference.models.relbin_cpu", - ] +cythonext = ['waveform.spa_tmplt', + 'waveform.utils', + 'types.array', + 'filter.matchedfilter', + 'vetoes.chisq'] ext = [] - -libraries = ['m'] # Some platforms / toolchains don't implicitly link this cython_compile_args = ['-O3', '-w', '-ffast-math', '-ffinite-math-only'] @@ -218,18 +233,57 @@ def run(self): cython_compile_args += ["-stdlib=libc++"] cython_link_args += ["-stdlib=libc++"] - for name in cythonext: - fname = name.replace('.', '/') - e = Extension(name, - [f"{fname}.pyx"], - language='c++', + e = Extension("pycbc.%s_cpu" % name, + ["pycbc/%s_cpu.pyx" % name.replace('.', '/')], extra_compile_args=cython_compile_args, extra_link_args=cython_link_args, - libraries=libraries, compiler_directives={'embedsignature': True}) ext.append(e) +# Not all modules work like this: +e = Extension("pycbc.fft.fftw_pruned_cython", + ["pycbc/fft/fftw_pruned_cython.pyx"], + extra_compile_args=cython_compile_args, + extra_link_args=cython_link_args, + compiler_directives={'embedsignature': True}) +ext.append(e) +e = Extension("pycbc.events.eventmgr_cython", + 
["pycbc/events/eventmgr_cython.pyx"], + extra_compile_args=cython_compile_args, + extra_link_args=cython_link_args, + compiler_directives={'embedsignature': True}) +ext.append(e) +e = Extension("pycbc.events.simd_threshold_cython", + ["pycbc/events/simd_threshold_cython.pyx"], + language='c++', + extra_compile_args=cython_compile_args, + extra_link_args=cython_link_args, + compiler_directives={'embedsignature': True}) +ext.append(e) +e = Extension("pycbc.filter.simd_correlate_cython", + ["pycbc/filter/simd_correlate_cython.pyx"], + language='c++', + extra_compile_args=cython_compile_args, + extra_link_args=cython_link_args, + compiler_directives={'embedsignature': True}) +ext.append(e) +e = Extension("pycbc.waveform.decompress_cpu_cython", + ["pycbc/waveform/decompress_cpu_cython.pyx"], + language='c++', + extra_compile_args=cython_compile_args, + extra_link_args=cython_link_args, + compiler_directives={'embedsignature': True}) +ext.append(e) +e = Extension("pycbc.inference.models.relbin_cpu", + ["pycbc/inference/models/relbin_cpu.pyx"], + language='c++', + extra_compile_args=cython_compile_args, + extra_link_args=cython_link_args, + compiler_directives={'embedsignature': True}) +ext.append(e) + + setup( name = 'PyCBC', version = VERSION, @@ -238,7 +292,7 @@ def run(self): long_description_content_type='text/markdown', author = 'The PyCBC team', author_email = 'alex.nitz@gmail.org', - url = 'http://pycbc.org/', + url = 'http://www.pycbc.org/', download_url = f'https://github.com/gwastro/pycbc/tarball/v{VERSION}', keywords = [ 'ligo', @@ -259,13 +313,14 @@ def run(self): 'pycbc.neutron_stars': find_files('pycbc/neutron_stars') }, ext_modules = ext, - python_requires='>=3.9', + python_requires='>=3.7', classifiers=[ 'Programming Language :: Python', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', 'Intended Audience :: Science/Research', 'Natural Language :: English', 'Topic :: Scientific/Engineering', diff --git a/test/test_live_coinc_compare.py b/test/test_live_coinc_compare.py index 531bb8d16f0..68c7cdc6e37 100644 --- a/test/test_live_coinc_compare.py +++ b/test/test_live_coinc_compare.py @@ -76,8 +76,7 @@ def setUp(self, *args): timeslide_interval=0.1, background_ifar_limit=100, store_background=True, - coinc_window_pad=0.002, - statistic_refresh_rate=None, + coinc_window_pad=0.002 ) # number of templates in the bank diff --git a/test/test_timeseries.py b/test/test_timeseries.py index 82445f3ab90..cfc9488e8b5 100644 --- a/test/test_timeseries.py +++ b/test/test_timeseries.py @@ -49,7 +49,7 @@ from numpy import ndarray as CPUArray -class TestTimeSeriesBase(array_base, unittest.TestCase): +class TestTimeSeriesBase(array_base,unittest.TestCase): __test__ = False def setUp(self): self.scheme = _scheme @@ -481,10 +481,10 @@ def test_at_time(self): a = TimeSeries([0, 1, 2, 3, 4, 5, 6, 7], delta_t=1.0) self.assertAlmostEqual(a.at_time(0.5), 0.0) - self.assertAlmostEqual(a.at_time(0.6, nearest_sample=True), 1.0) + self.assertAlmostEqual(a.at_time(0.6, nearest_sample=True), 1.0) self.assertAlmostEqual(a.at_time(0.5, interpolate='linear'), 0.5) - self.assertAlmostEqual(a.at_time([2.5], interpolate='quadratic'), 2.5) - self.assertAlmostEqual(a.at_time(lal.LIGOTimeGPS(2.1)), 2.0) + self.assertAlmostEqual(a.at_time([2.5], + interpolate='quadratic'), 2.5) i = numpy.array([-0.2, 0.5, 1.5, 7.0]) @@ 
diff --git a/test/test_live_coinc_compare.py b/test/test_live_coinc_compare.py
index 531bb8d16f0..68c7cdc6e37 100644
--- a/test/test_live_coinc_compare.py
+++ b/test/test_live_coinc_compare.py
@@ -76,8 +76,7 @@ def setUp(self, *args):
             timeslide_interval=0.1,
             background_ifar_limit=100,
             store_background=True,
-            coinc_window_pad=0.002,
-            statistic_refresh_rate=None,
+            coinc_window_pad=0.002
         )

         # number of templates in the bank
diff --git a/test/test_timeseries.py b/test/test_timeseries.py
index 82445f3ab90..cfc9488e8b5 100644
--- a/test/test_timeseries.py
+++ b/test/test_timeseries.py
@@ -49,7 +49,7 @@

 from numpy import ndarray as CPUArray

-class TestTimeSeriesBase(array_base, unittest.TestCase):
+class TestTimeSeriesBase(array_base,unittest.TestCase):
     __test__ = False
     def setUp(self):
         self.scheme = _scheme
@@ -481,10 +481,10 @@ def test_at_time(self):
         a = TimeSeries([0, 1, 2, 3, 4, 5, 6, 7], delta_t=1.0)

         self.assertAlmostEqual(a.at_time(0.5), 0.0)
-        self.assertAlmostEqual(a.at_time(0.6, nearest_sample=True), 1.0)
+        self.assertAlmostEqual(a.at_time(0.6, nearest_sample=True), 1.0)
         self.assertAlmostEqual(a.at_time(0.5, interpolate='linear'), 0.5)
-        self.assertAlmostEqual(a.at_time([2.5], interpolate='quadratic'), 2.5)
-        self.assertAlmostEqual(a.at_time(lal.LIGOTimeGPS(2.1)), 2.0)
+        self.assertAlmostEqual(a.at_time([2.5],
+                                         interpolate='quadratic'), 2.5)

         i = numpy.array([-0.2, 0.5, 1.5, 7.0])

@@ -504,11 +504,6 @@ def test_at_time(self):
         n = numpy.array([0, 0.0, 1.5, 0.0])
         self.assertAlmostEqual((x-n).sum(), 0)

-        # Check that the output corresponds to input being scalar/array.
-        self.assertEqual(numpy.ndim(a.at_time(0.5)), 0)
-        self.assertEqual(numpy.ndim(a.at_time(lal.LIGOTimeGPS(2.1))), 0)
-        self.assertEqual(numpy.ndim(a.at_time(i)), 1)
-
     def test_inject(self):
         a = TimeSeries(numpy.zeros(2**20, dtype=numpy.float32),
                        delta_t=1.0)
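The ndim assertions dropped above are where the at_time change at the top of this patch becomes visible: numpy.isscalar() and numpy.ndim(...) == 0 classify 0-d arrays differently, and isscalar() also does not recognise non-numpy objects such as lal.LIGOTimeGPS, so the reverted branch returns a length-1 array in cases where master returned a scalar. A small sketch of the disagreement, using plain numpy stand-ins only:

    import numpy as np

    candidates = [
        0.5,                           # python float
        np.float64(0.5),               # numpy scalar
        np.array(0.5),                 # 0-d array
        np.array(0.5, dtype=object),   # 0-d object array (LIGOTimeGPS-like)
        np.array([0.5]),               # 1-d array
    ]
    for x in candidates:
        print(type(x).__name__, np.isscalar(x), np.ndim(x) == 0)

    # isscalar() is False for every ndarray, including 0-d ones, so an
    # `if isscalar(time)` branch falls through to the array return path
    # for inputs that `ndim(time) == 0` would have treated as scalars.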
diff --git a/tools/docker_build_dist.sh b/tools/docker_build_dist.sh
index 14d26c33f29..3acaa64cbe7 100755
--- a/tools/docker_build_dist.sh
+++ b/tools/docker_build_dist.sh
@@ -43,6 +43,7 @@ if [ "x${PYCBC_CONTAINER}" == "xpycbc_rhel_virtualenv" ]; then
     yum clean all
     yum makecache
     yum -y install openssl-devel
+    yum -y install ligo-proxy-utils
     yum -y install python3-virtualenv
     yum -y install hdf5-static libxml2-static zlib-static libstdc++-static cfitsio-static glibc-static swig fftw-static gsl-static --skip-broken
diff --git a/tools/pycbc_test_suite.sh b/tools/pycbc_test_suite.sh
index 75cc706a49c..6e1f745194e 100755
--- a/tools/pycbc_test_suite.sh
+++ b/tools/pycbc_test_suite.sh
@@ -7,78 +7,81 @@ echo -e "\\n>> [`date`] Python Major Version:" $PYTHON_VERSION
 PYTHON_MINOR_VERSION=`python -c 'import sys; print(sys.version_info.minor)'`
 echo -e "\\n>> [`date`] Python Minor Version:" $PYTHON_MINOR_VERSION

-# This will work from anywhere within the pycbc directory
-this_script_dir=`dirname -- "$( readlink -f -- "$0"; )"`
-cd $this_script_dir
-cd ..
-
 LOG_FILE=$(mktemp -t pycbc-test-log.XXXXXXXXXX)
 RESULT=0
-cat_output=true
-
-function test_result {
-    if test $? -ne 0 ; then
-        RESULT=1
-        echo -e "    FAILED!"
-        if $cat_output ; then
-            echo -e "---------------------------------------------------------"
-            cat $LOG_FILE
-            echo -e "---------------------------------------------------------"
-        fi
-    else
-        echo -e "    Pass"
-    fi
-}

 if [ "$PYCBC_TEST_TYPE" = "unittest" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     for prog in `find test -name '*.py' -print | egrep -v '(long|lalsim|test_waveform)'`
     do
-        prog_short=`echo $prog | rev | cut -d"/" -f1 | rev`
-        echo -e ">> [`date`] running unit test for $prog_short"
+        echo -e ">> [`date`] running unit test for $prog"
         python $prog &> $LOG_FILE
-        test_result
+        if test $? -ne 0 ; then
+            RESULT=1
+            echo -e "    FAILED!"
+            echo -e "---------------------------------------------------------"
+            cat $LOG_FILE
+            echo -e "---------------------------------------------------------"
+        else
+            echo -e "    Pass."
+        fi
     done
 fi

 if [ "$PYCBC_TEST_TYPE" = "help" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     # check that all executables that do not require
     # special environments can return a help message
-    for prog in `find ${PATH//:/ } -maxdepth 1 -name 'pycbc*' -print 2>/dev/null | egrep -v '(pycbc_live_nagios_monitor|pycbc_mvsc_get_features)' | sort | uniq`
+    for prog in `find ${PATH//:/ } -maxdepth 1 -name 'pycbc*' -print 2>/dev/null | egrep -v '(pycbc_live_nagios_monitor|pycbc_make_offline_grb_workflow|pycbc_mvsc_get_features|pycbc_upload_xml_to_gracedb|pycbc_coinc_time)' | sort | uniq`
     do
         echo -e ">> [`date`] running $prog --help"
         $prog --help &> $LOG_FILE
-        test_result
-        if [[ `echo $prog | egrep '(pycbc_copy_output_map|pycbc_submit_dax|pycbc_stageout_failed_workflow)'` ]] ; then
-            continue
+        if test $? -ne 0 ; then
+            RESULT=1
+            echo -e "    FAILED!"
+            echo -e "---------------------------------------------------------"
+            cat $LOG_FILE
+            echo -e "---------------------------------------------------------"
+        else
+            echo -e "    Pass."
         fi
-        echo -e ">> [`date`] running $prog --version"
-        $prog --version &> $LOG_FILE
-        test_result
-    done
-    # also check that --version with increased modifiers works for one executable
-    echo -e ">> [`date`] running pycbc_inspiral --version with modifiers"
-    for modifier in "" 0 1 2 3
-    do
-        echo -e ">> [`date`] running pycbc_inspiral --version ${modifier}"
-        pycbc_inspiral --version ${modifier} &> $LOG_FILE
-        test_result
     done
+    # also check that --version works for one executable
+    echo -e ">> [`date`] running pycbc_inspiral --version"
+    pycbc_inspiral --version &> $LOG_FILE
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+        cat $LOG_FILE
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
 fi

-cat_output=false
-
 if [ "$PYCBC_TEST_TYPE" = "search" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     # run pycbc inspiral test
     pushd examples/inspiral
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     # run a quick bank placement example
     pushd examples/tmpltbank
     bash -e testNonspin2.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     # run PyCBC Live test
@@ -87,14 +90,26 @@ if [ "$PYCBC_TEST_TYPE" = "search" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     # broken by a new release of python-ligo-lw
     pushd examples/live
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd
     fi

     # run pycbc_multi_inspiral (PyGRB) test
     pushd examples/multi_inspiral
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd
 fi

@@ -103,49 +118,97 @@ if [ "$PYCBC_TEST_TYPE" = "inference" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     ## Run inference on 2D-normal analytic likelihood function
     pushd examples/inference/analytic-normal2d
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run inference on BBH example; this will also run
     ## a test of create_injections
     pushd examples/inference/bbh-injection
     bash -e make_injection.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     # now run inference
     bash -e run_test.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run inference on GW150914 data
     pushd examples/inference/gw150914
     bash -e run_test.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run inference using single template model
     pushd examples/inference/single
     bash -e get.sh
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run inference using relative model
     pushd examples/inference/relative
     bash -e get.sh
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run inference using the hierarchical model
     pushd examples/inference/hierarchical
     bash -e run_test.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run inference samplers
     pushd examples/inference/samplers
     bash -e run.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd

     ## Run pycbc_make_skymap example
@@ -154,7 +217,13 @@ if [ "$PYCBC_TEST_TYPE" = "inference" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     # broken by a new release of python-ligo-lw
     pushd examples/make_skymap
     bash -e simulated_data.sh
-    test_result
+    if test $? -ne 0 ; then
+        RESULT=1
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+    else
+        echo -e "    Pass."
+    fi
     popd
     fi
 fi

@@ -163,7 +232,11 @@ if [ "$PYCBC_TEST_TYPE" = "docs" ] || [ -z ${PYCBC_TEST_TYPE+x} ]; then
     echo -e "\\n>> [`date`] Building documentation"

     python setup.py build_gh_pages
-    test_result
+    if test $? -ne 0 ; then
+        echo -e "    FAILED!"
+        echo -e "---------------------------------------------------------"
+        RESULT=1
+    fi
 fi

 exit ${RESULT}
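Most of the pycbc_test_suite.sh hunks above reintroduce a per-example copy of the same report-and-record block that master had factored into the removed test_result helper. For reference, here is that factored pattern rendered as a Python sketch; the helper name is borrowed from the removed shell function, the commands are placeholders, and this is purely an illustration rather than anything the patch itself ships:

    import subprocess

    RESULT = 0

    def test_result(step, returncode, log, cat_output=True):
        # Record the step's exit status and dump the log only on failure,
        # mirroring the removed shell helper's behaviour.
        global RESULT
        if returncode != 0:
            RESULT = 1
            print(step + ": FAILED!")
            if cat_output:
                print("-" * 57)
                print(log)
                print("-" * 57)
        else:
            print(step + ": Pass")

    # placeholder commands; any POSIX system provides true/false
    for cmd in (["true"], ["false"]):
        proc = subprocess.run(cmd, capture_output=True, text=True)
        test_result(" ".join(cmd), proc.returncode, proc.stdout + proc.stderr)

    raise SystemExit(RESULT)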
diff --git a/tools/static/cant_be_built b/tools/static/cant_be_built
index 3f43e30cd29..a6e0a2a7339 100644
--- a/tools/static/cant_be_built
+++ b/tools/static/cant_be_built
@@ -5,6 +5,7 @@ pycbc_mvsc_dag
 pycbc_inspinjfind
 pycbc_get_loudest_params
 pycbc_randomize_inj_dist_by_optsnr
+pycbc_coinc_time
 pycbc_inference_plot_prior
 pycbc_inference_plot_posterior
 pycbc_fit_sngls_over_param
diff --git a/tools/static/needs_full_build b/tools/static/needs_full_build
index e5349c77b62..941d3e9cf4b 100644
--- a/tools/static/needs_full_build
+++ b/tools/static/needs_full_build
@@ -1,3 +1,4 @@
+pycbc_coinc_time
 pycbc_geom_aligned_2dstack
 pycbc_plot_glitchgram
 pycbc_compute_durations
diff --git a/tox.ini b/tox.ini
index d3d96b2039b..4d0538c3968 100644
--- a/tox.ini
+++ b/tox.ini
@@ -3,8 +3,7 @@ recreate = true
 envlist = py-unittest
 indexserver =
     preinstall = https://pypi.python.org/simple
-requires=tox-conda
-    setuptools
+requires=tox-conda

 [base]
 deps =
@@ -15,7 +14,7 @@ deps =
 [testenv]
 allowlist_externals = bash
 passenv=LAL_DATA_PATH
-conda_deps=openssl
+conda_deps=openssl=1.1
 conda_channels=conda-forge
 platform = lin: linux
     mac: darwin
@@ -26,7 +25,7 @@ deps =
     {[base]deps}
     pytest
 ; Needed for `BBHx` package to work with PyCBC
-    git+https://github.com/titodalcanton/BBHx.git@py39-and-cleanup; sys_platform == 'linux'
+    git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux'
     git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux'
 conda_deps=
     mysqlclient
@@ -38,10 +37,6 @@ conda_deps=
     gsl
     lapack==3.6.1
 conda_channels=conda-forge
-setenv =
-    ; Tell the linker to look for shared libs inside the temporary Conda env.
-    ; Needed to build BBHx's wheel, whick links to LAPACK.
-    LIBRARY_PATH={envdir}/lib:{env:LIBRARY_PATH:}
 commands = pytest

 # The following are long running or may require
@@ -71,7 +66,7 @@ commands = bash tools/pycbc_test_suite.sh
 deps =
     {[base]deps}
 ; Needed for `BBHx` package to work with PyCBC
-    git+https://github.com/titodalcanton/BBHx.git@py39-and-cleanup; sys_platform == 'linux'
+    git+https://github.com/mikekatz04/BBHx.git@4fff509; sys_platform == 'linux'
     git+https://github.com/gwastro/BBHX-waveform-model.git; sys_platform == 'linux'
 conda_deps=
     mysqlclient
@@ -85,9 +80,5 @@ conda_deps=
     lapack==3.6.1
     openmpi
 conda_channels=conda-forge
-setenv =
-    PYCBC_TEST_TYPE=docs
-    ; Tell the linker to look for shared libs inside the temporary Conda env.
-    ; Needed to build BBHx's wheel, whick links to LAPACK.
-    LIBRARY_PATH={envdir}/lib:{env:LIBRARY_PATH:}
+setenv = PYCBC_TEST_TYPE=docs
 commands = bash tools/pycbc_test_suite.sh