Cherry-picked commits for 2.1.3 release #4604

Merged
Changes from all commits: 32 commits
6db484e  Make autogating work for the PyCBC Live early-warning search (#4298) - hoangstephanie, Apr 21, 2023
dfd8bdd  Add mchirp / tau0 direct conversions (#4337) - tdent, Apr 27, 2023
298677c  missed update to __all__ (#4357) - tdent, May 17, 2023
968912a  Use constant chirp time window in snr optimizer (#4338) - tdent, May 9, 2023
8c0ecf4  Fix edge case in PyCBC Live's check for horizon distance (#4362) - titodalcanton, May 17, 2023
6a69124  Live single-trigger combined fits date order bug (#4369) - GarethCabournDavies, May 25, 2023
67cc123  Add argument verification for singles (#4365) - GarethCabournDavies, Jul 17, 2023
82e5a8e  Added an OSError exception to MKL check (#4390) - ArthurTolley, Jun 13, 2023
6a1e8ae  Move PyCBC venvs to new IGWN CVMFS server (#4412) - spxiwh, Jun 26, 2023
92a76fa  Allow SNR optimizer to use candidate point in initial array (#4393) - GarethCabournDavies, Jun 28, 2023
276f339  pycbc_optimize_snr: Implement mass dependent bounds on spin parameter… - ArthurTolley, Jun 27, 2023
b1838bf  Introduced a new argument --skymap-only-ifos in pycbc_live (#4346) - SouradeepPal, Jul 11, 2023
d344e74  Fixes for Mac. - xangma, Apr 5, 2023
96f1ac2  Update pycbc_live - spxiwh, May 15, 2023
fc6f449  bugfix - allow non-running of singles (#4439) - GarethCabournDavies, Jul 20, 2023
6deb6f3  Live single fits combined bug (#4449) - GarethCabournDavies, Aug 4, 2023
138bb95  Live cleanup combine single fits (#4450) - titodalcanton, Aug 7, 2023
a9d9016  Threading race condition bugfix for optimize_snr in pycbc_live (#4342) - xangma, Aug 8, 2023
7be12f1  Revert "Allow SNR optimizer to use candidate point in initial array (… - titodalcanton, Nov 21, 2023
991762d  Set version to 2.1.3 - titodalcanton, Jan 16, 2024
7ebf243  update lalsimulation cvmfs path (#4580) - GarethCabournDavies, Dec 6, 2023
8288a19  Revert "add coordinates_space.py (#4289)" - spxiwh, Jan 8, 2024
98cf320  Removing LISA examples - spxiwh, Jan 8, 2024
05c4406  Remove inference examples - spxiwh, Jan 8, 2024
2ee05aa  Remove LISA deps - spxiwh, Jan 8, 2024
b33d7b7  Removing more LISA things - spxiwh, Jan 9, 2024
bb1ae76  Replace gw-openscience.org with gwosc.org (#4326) - titodalcanton, Apr 19, 2023
90a940b  Avoid skymap v1.1.0 (#4429) - spxiwh, Jul 10, 2023
0c7cfb4  Try to remove the LISA inference example - titodalcanton, Jan 18, 2024
9d86314  Force pegasus 5.0.3 - titodalcanton, Jan 18, 2024
a531b31  Fix Sphinx deprecation (#4331) - titodalcanton, Apr 24, 2023
89f4f1e  Try to fix a warning treated as error (duplicated entry in toctree) - titodalcanton, Jan 19, 2024
Dockerfile: 4 changes (2 additions, 2 deletions)
@@ -15,7 +15,7 @@ RUN dnf -y install https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-

# set up environment
RUN cd / && \
mkdir -p /cvmfs/config-osg.opensciencegrid.org /cvmfs/oasis.opensciencegrid.org /cvmfs/gwosc.osgstorage.org && echo "config-osg.opensciencegrid.org /cvmfs/config-osg.opensciencegrid.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "oasis.opensciencegrid.org /cvmfs/oasis.opensciencegrid.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "gwosc.osgstorage.org /cvmfs/gwosc.osgstorage.org cvmfs ro,noauto 0 0" >> /etc/fstab && mkdir -p /oasis /scratch /projects /usr/lib64/slurm /var/run/munge && \
mkdir -p /cvmfs/config-osg.opensciencegrid.org /cvmfs/software.igwn.org /cvmfs/gwosc.osgstorage.org && echo "config-osg.opensciencegrid.org /cvmfs/config-osg.opensciencegrid.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "software.igwn.org /cvmfs/software.igwn.org cvmfs ro,noauto 0 0" >> /etc/fstab && echo "gwosc.osgstorage.org /cvmfs/gwosc.osgstorage.org cvmfs ro,noauto 0 0" >> /etc/fstab && mkdir -p /oasis /scratch /projects /usr/lib64/slurm /var/run/munge && \
groupadd -g 1000 pycbc && useradd -u 1000 -g 1000 -d /opt/pycbc -k /etc/skel -m -s /bin/bash pycbc

# Install MPI software needed for pycbc_inference
@@ -37,7 +37,7 @@ ENV PATH "/usr/local/bin:/usr/bin:/bin:/lib64/openmpi/bin/bin"
# Set the default LAL_DATA_PATH to point at CVMFS first, then the container.
# Users wanting it to point elsewhere should start docker using:
# docker <cmd> -e LAL_DATA_PATH="/my/new/path"
ENV LAL_DATA_PATH "/cvmfs/oasis.opensciencegrid.org/ligo/sw/pycbc/lalsuite-extra/current/share/lalsimulation:/opt/pycbc/pycbc-software/share/lal-data"
ENV LAL_DATA_PATH "/cvmfs/software.igwn.org/pycbc/lalsuite-extra/current/share/lalsimulation:/opt/pycbc/pycbc-software/share/lal-data"

# When the container is started with
# docker run -it pycbc/pycbc-el8:latest
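For context, not part of the diff itself: LAL_DATA_PATH is a colon-separated list of directories that LALSimulation searches for auxiliary waveform data (e.g. ROM files from lalsuite-extra). Below is a minimal sketch of verifying that a data file is reachable through the container's search path; the helper function and the example file name are illustrative assumptions, not part of PyCBC or LALSuite.

import os

# Hypothetical sanity check (not from this PR): walk the colon-separated
# LAL_DATA_PATH and return the first directory containing the given file.
def find_lal_data_file(filename, env_var="LAL_DATA_PATH"):
    for directory in os.environ.get(env_var, "").split(os.pathsep):
        candidate = os.path.join(directory, filename)
        if directory and os.path.isfile(candidate):
            return candidate
    return None

# Example usage; the ROM file name is an assumption for illustration.
found = find_lal_data_file("SEOBNRv4ROM_v2.0.hdf5")
print(found or "not found in LAL_DATA_PATH")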
bin/live/pycbc_live_combine_single_fits: 104 changes (52 additions, 52 deletions)
@@ -12,12 +12,15 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.

"""Combine PyCBC Live single-detector trigger fitting parameters from several
different files."""

import h5py, numpy as np, argparse
import logging
import pycbc

parser = argparse.ArgumentParser(usage="",
description="Combine fitting parameters from several different files")

parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--verbose", action="store_true",
help="Print extra debugging information", default=False)
parser.add_argument("--trfits-files", nargs="+", required=True,
@@ -30,7 +33,7 @@ parser.add_argument("--output", required=True,
parser.add_argument("--ifos", required=True, nargs="+",
help="list of ifos fo collect info for")

args=parser.parse_args()
args = parser.parse_args()

pycbc.init_logging(args.verbose)

@@ -42,8 +45,8 @@ if args.conservative_percentile < 50 or \
"otherwise it is either not a percentile, or not "
"conservative.")

counts_all = {ifo:[] for ifo in args.ifos}
alphas_all = {ifo:[] for ifo in args.ifos}
counts_all = {ifo: [] for ifo in args.ifos}
alphas_all = {ifo: [] for ifo in args.ifos}
analysis_dates = []

with h5py.File(args.trfits_files[0], 'r') as fit_f0:
@@ -58,15 +61,15 @@ with h5py.File(args.trfits_files[0], 'r') as fit_f0:
fit_thresh = fit_f0.attrs['fit_threshold']
fit_func = fit_f0.attrs['fit_function']

live_times = {ifo : [] for ifo in args.ifos}
live_times = {ifo: [] for ifo in args.ifos}

trigger_file_times = []
trigger_file_starts = []
trigger_file_ends = []

n_files = len(args.trfits_files)
logging.info("Checking through %d files", n_files)

for f in args.trfits_files:

fits_f = h5py.File(f, 'r')
# Check that the file uses the same setup as file 0, to make sure
# all coefficients are comparable
@@ -77,16 +80,17 @@ for f in args.trfits_files:
assert all(fits_f['bins_lower'][:] == bl)
assert all(fits_f['bins_upper'][:] == bu)

# Get the time of the last trigger in the trigger_fits file
# Get the time of the first/last triggers in the trigger_fits file
gps_last = 0
gps_first = np.inf
for ifo in args.ifos:
if ifo not in fits_f:
continue
else:
trig_times = fits_f[ifo]['triggers']['end_time'][:]
gps_last = max(gps_last,
trig_times.max())
trigger_file_times.append(gps_last)
trig_times = fits_f[ifo]['triggers']['end_time'][:]
gps_last = max(gps_last, trig_times.max())
gps_first = min(gps_first, trig_times.min())
trigger_file_starts.append(gps_first)
trigger_file_ends.append(gps_last)

for ifo in args.ifos:
if ifo not in fits_f:
@@ -98,66 +102,62 @@ for f in args.trfits_files:
counts_all[ifo].append(fits_f[ifo + '/counts'][:])
alphas_all[ifo].append(fits_f[ifo + '/fit_coeff'][:])
if any(np.isnan(fits_f[ifo + '/fit_coeff'][:])):
logging.info("nan in " + f + ", " + ifo)
logging.info("nan in %s, %s", f, ifo)
logging.info(fits_f[ifo + '/fit_coeff'][:])
fits_f.close()

# Set up the date array, this is stored as an offset from the start date of
# the combination to the end of the trigger_fits file.
# This allows for missing or empty days
# Set up the date array, this is stored as an offset from the first trigger time of
# the first file to the last trigger of the file

trigger_file_times = np.array(trigger_file_times)
ad_order = np.argsort(np.array(trigger_file_times))
start_date_n = trigger_file_times[ad_order[0]]
ad = trigger_file_times[ad_order] - start_date_n
trigger_file_starts = np.array(trigger_file_starts)
trigger_file_ends = np.array(trigger_file_ends)
ad_order = np.argsort(trigger_file_starts)
start_time_n = trigger_file_starts[ad_order[0]]
ad = trigger_file_ends[ad_order] - start_time_n

# Get the counts and alphas sorted by bin rather than by date
# Get the counts and alphas
counts_bin = {ifo: [c for c in zip(*counts_all[ifo])] for ifo in args.ifos}
alphas_bin = {ifo: [a for a in zip(*alphas_all[ifo])] for ifo in args.ifos}

alphas_out = {ifo : np.zeros(len(alphas_bin[ifo])) for ifo in args.ifos}
counts_out = {ifo : np.inf * np.ones(len(counts_bin[ifo])) for ifo in args.ifos}
cons_alphas_out = {ifo : np.zeros(len(alphas_bin[ifo])) for ifo in args.ifos}
cons_counts_out = {ifo : np.inf * np.ones(len(alphas_bin[ifo])) for ifo in args.ifos}
alphas_out = {ifo: np.zeros(len(alphas_bin[ifo])) for ifo in args.ifos}
counts_out = {ifo: np.inf * np.ones(len(counts_bin[ifo])) for ifo in args.ifos}
cons_alphas_out = {ifo: np.zeros(len(alphas_bin[ifo])) for ifo in args.ifos}
cons_counts_out = {ifo: np.inf * np.ones(len(alphas_bin[ifo])) for ifo in args.ifos}

logging.info("Writing results")
fout = h5py.File(args.output, 'w')
fout.attrs['fit_threshold'] = fit_thresh
fout.attrs['conservative_percentile'] = args.conservative_percentile
fout.attrs['ifos'] = args.ifos
fout['bins_edges'] = list(bl) + [bu[-1]]
fout['fits_dates'] = ad + start_date_n
fout['fits_dates'] = ad + start_time_n

for ifo in args.ifos:
fout.create_group(ifo)
fout[ifo].attrs['live_time'] = sum(live_times[ifo])

save_allmeanalpha = {}
for ifo in args.ifos:
fout_ifo = fout[ifo]
logging.info(ifo)
fout_ifo = fout.create_group(ifo)
l_times = np.array(live_times[ifo])
count_all = np.sum(counts_bin[ifo], axis=0) / l_times
invalphan = np.array(counts_bin[ifo]) / np.array(alphas_bin[ifo])
invalphan_all = np.mean(invalphan, axis=0)
alpha_all = np.mean(counts_bin[ifo], axis=0) / invalphan_all
meant = l_times.mean()

fout_ifo[f'separate_fits/live_times'] = l_times
fout_ifo[f'separate_fits/date'] = ad + start_date_n
fout_ifo.attrs['live_time'] = l_times.sum()

fout_ifo['separate_fits/live_times'] = l_times[ad_order]
fout_ifo['separate_fits/start_time'] = trigger_file_starts[ad_order]
fout_ifo['separate_fits/end_time'] = trigger_file_ends[ad_order]

for counter, a_c_u_l in enumerate(zip(alphas_bin[ifo],
counts_bin[ifo], bu, bl)):
a, c, u, l = a_c_u_l
a = np.array(a)
c = np.array(c)
# Sort alpha and counts by date
a = np.array(a)[ad_order]
c = np.array(c)[ad_order]
invalphan = c / a
mean_alpha = c.mean() / invalphan.mean()
cons_alpha = np.percentile(a, 100 - args.conservative_percentile)
cons_alphas_out[ifo][counter] = cons_alpha
alphas_out[ifo][counter] = mean_alpha
cons_count = np.percentile(c, args.conservative_percentile)
cons_counts_out[ifo][counter] = cons_count * len(c)
counts_out[ifo][counter] = c.sum()
# To get the count values, we need to convert to rates and back again
r = c / l_times[ad_order]
cons_rate = np.percentile(r, args.conservative_percentile)
cons_counts_out[ifo][counter] = cons_rate * l_times[ad_order].sum()
counts_out[ifo][counter] = np.mean(r) * l_times[ad_order].sum()

fout_ifo[f'separate_fits/bin_{counter:d}/fit_coeff'] = a
fout_ifo[f'separate_fits/bin_{counter:d}/counts'] = c
@@ -173,15 +173,15 @@ for ifo in args.ifos:
# Take some averages for plotting and summary values
overall_invalphan = counts_out[ifo] / alphas_out[ifo]
overall_meanalpha = counts_out[ifo].mean() / overall_invalphan.mean()
sum_counts_out = counts_out[ifo].sum() / sum(live_times[ifo])
save_allmeanalpha[ifo] = overall_meanalpha

# For the fixed version, we just set this to 1
fout_ifo['fixed/counts'] = [1 for c in counts_out[ifo]]
fout_ifo['fixed/fit_coeff'] = [0 for a in alphas_out[ifo]]
fout_ifo['fixed/counts'] = [1] * len(counts_out[ifo])
fout_ifo['fixed/fit_coeff'] = [0] * len(alphas_out[ifo])

# Add some useful info to the output file
fout_ifo.attrs['mean_alpha'] = save_allmeanalpha[ifo]
fout_ifo.attrs['mean_alpha'] = overall_meanalpha
fout_ifo.attrs['total_counts'] = counts_out[ifo].sum()

fout.close()

logging.info('Done')
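To summarize the combination logic above with a simplified sketch (not the script itself): within each duration bin, the combined fit coefficient is the count-weighted inverse mean of the per-file coefficients; counts are combined by first converting to rates, so that files with unequal live times are weighted consistently; and the conservative variants replace the mean with a percentile across files. The input values below are made up for illustration.

import numpy as np

# Per-file inputs for a single duration bin (illustrative numbers).
counts = np.array([120.0, 95.0, 140.0])      # triggers above fit threshold
alphas = np.array([4.2, 4.5, 3.9])           # per-file fit coefficients
live_times = np.array([86400.0, 80000.0, 86400.0])
conservative_percentile = 95

# Mean alpha, as in the script: mean(counts) / mean(counts / alpha),
# i.e. an inverse-alpha average weighted by trigger counts.
mean_alpha = counts.mean() / (counts / alphas).mean()

# Conservative alpha uses a low percentile: a smaller coefficient
# implies a heavier tail, which is the pessimistic assumption.
cons_alpha = np.percentile(alphas, 100 - conservative_percentile)

# Counts are combined via rates and scaled back by the total live time.
rates = counts / live_times
combined_counts = rates.mean() * live_times.sum()
cons_counts = np.percentile(rates, conservative_percentile) * live_times.sum()

print(mean_alpha, cons_alpha, combined_counts, cons_counts)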
bin/live/pycbc_live_plot_combined_single_fits: 76 changes (46 additions, 30 deletions)
@@ -63,7 +63,8 @@ live_total = {}

separate_alphas = {}
separate_counts = {}
separate_dates = {}
separate_starts = {}
separate_ends = {}
separate_times = {}

logging.info("Loading Data")
@@ -72,7 +73,6 @@ with h5py.File(args.combined_fits_file, 'r') as cff:
bins_edges = cff['bins_edges'][:]
conservative_percentile = cff.attrs['conservative_percentile']
n_bins = len(bins_edges) - 1
fits_dates = cff['fits_dates'][:]
for ifo in ifos:
logging.info(ifo)
live_total[ifo] = cff[ifo].attrs['live_time']
@@ -81,7 +81,8 @@ with h5py.File(args.combined_fits_file, 'r') as cff:
cons_count[ifo] = cff[ifo]['conservative']['counts'][:]
cons_alpha[ifo] = cff[ifo]['conservative']['fit_coeff'][:]

separate_dates[ifo] = cff[ifo]['separate_fits']['date'][:]
separate_starts[ifo] = cff[ifo]['separate_fits']['start_time'][:]
separate_ends[ifo] = cff[ifo]['separate_fits']['end_time'][:]
separate_times[ifo] = cff[ifo]['separate_fits']['live_times'][:]

separate_data = cff[ifo]['separate_fits']
@@ -108,6 +109,20 @@ def bin_proportion(upper, lower, log_spacing=False):
else:
return ((lower + upper) / 2. - bin_min) / (bin_max - bin_min)

# Set up the x ticks - note that these are rounded to the nearest
# midnight, so may not line up exactly with the data
min_start = min([separate_starts[ifo].min() for ifo in ifos])
max_end = max([separate_ends[ifo].max() for ifo in ifos])

xtix = []
xtix_labels = []
t = min_start
while t < max_end:
# Strip off the time information, ticks are at midnight
time_dt = gpstime.gps_to_utc(t).date()
xtix_labels.append(time_dt.strftime("%Y-%m-%d"))
xtix.append(gpstime.utc_to_gps(time_dt).gpsSeconds)
t += 86400

logging.info("Plotting fits information")
for ifo in ifos:
@@ -119,24 +134,23 @@ for ifo in ifos:
alpha_lines = []
count_lines = []

start_date_dt = gpstime.gps_to_utc(separate_dates[ifo][0])
start_date = start_date_dt.strftime("%Y-%m-%d %H:%M:%S")

for i, bl_bu in enumerate(zip(bin_starts, bin_ends)):
bl, bu = bl_bu

alphas = separate_alphas[ifo][i]
counts = separate_counts[ifo][i]

# Replace invalid fit coefficients with infinity, so they are
# omitted from the plot rather than being drawn as zero
valid = np.logical_and(alphas > 0, np.isfinite(alphas))

alphas[np.logical_not(valid)] = np.inf

if not any(valid):
logging.warning("No valid fit coefficients for %s", ifo)
continue
a = alphas[valid]
l_times = separate_times[ifo][valid]
rate = counts[valid] / l_times
ad = (separate_dates[ifo][valid] - separate_dates[ifo][0]) / 86400.

l_times = separate_times[ifo]
rate = counts / l_times

ma = mean_alpha[ifo][i]
ca = cons_alpha[ifo][i]
@@ -146,20 +160,19 @@ bin_prop = bin_proportion(bu, bl,
bin_prop = bin_proportion(bu, bl,
log_spacing=args.log_colormap)
bin_colour = plt.get_cmap(args.colormap)(bin_prop)

alpha_lines += ax_alpha.plot(ad, a, c=bin_colour,
label="duration %.2f-%.2f" % (bl, bu))
bin_label = f"duration {bl:.2f}-{bu:.2f}"
alpha_lines += ax_alpha.plot(separate_starts[ifo], alphas, c=bin_colour,
label=bin_label)
alpha_lines.append(ax_alpha.axhline(ma,
label="total fit = %.2f" % ma,
c=bin_colour, linestyle='--',))
c=bin_colour, linestyle='--',))
alpha_lab = f"{conservative_percentile:d}th %ile = {ca:.2f}"
alpha_lines.append(ax_alpha.axhline(ca,
c=bin_colour, linestyle=':',
label=alpha_lab))

count_lines += ax_count.plot(ad, rate,
c=bin_colour,
label="duration %.2f-%.2f" % (bl, bu))
count_lines += ax_count.plot(separate_starts[ifo], rate, c=bin_colour,
label=bin_label)
count_lines.append(ax_count.axhline(mr,
c=bin_colour, linestyle='--',
label=f"mean = {mr:.3f}"))
@@ -168,21 +181,24 @@ for ifo in ifos:
c=bin_colour, linestyle=':',
label=count_lab))

ax_alpha.set_xlabel('Days since ' + start_date)
ax_alpha.set_ylabel('Fit coefficient')
ax_alpha.set_xlim([ad[0], ad[-1]])
alpha_labels = [l.get_label() for l in alpha_lines]
ax_alpha.grid(zorder=-30)
ax_alpha.legend(alpha_lines, alpha_labels, loc='lower center',
ncol=3, bbox_to_anchor=(0.5, 1.01))
fig_alpha.tight_layout()
fig_alpha.savefig(args.output_plot_name_format.format(ifo=ifo, type='fit_coeffs'))
ax_count.set_xlabel('Days since ' + start_date)
ax_count.set_ylabel('Counts per live time')
ax_count.set_xlim([ad[0], ad[-1]])
ncol=5, bbox_to_anchor=(0.5, 1.01))
ax_alpha.set_ylabel('Fit coefficient')

count_labels = [l.get_label() for l in count_lines]
ax_count.legend(count_lines, count_labels, loc='lower center',
ncol=3, bbox_to_anchor=(0.5, 1.01))
ax_count.grid(zorder=-30)
ncol=5, bbox_to_anchor=(0.5, 1.01))
ax_count.set_ylabel('Rate of triggers above fit threshold [s$^{-1}$]')

for ax in [ax_count, ax_alpha]:
ax.set_xticks(xtix)
ax.set_xticklabels(xtix_labels, rotation=90)
# Add 1/4 day padding either side
ax.set_xlim(xtix[0] - 21600, xtix[-1] + 21600)
ax.grid(zorder=-30)

fig_count.tight_layout()
fig_count.savefig(args.output_plot_name_format.format(ifo=ifo, type='counts'))
fig_alpha.tight_layout()
fig_alpha.savefig(args.output_plot_name_format.format(ifo=ifo, type='fit_coeffs'))
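The x-tick construction added above can be exercised in isolation. Here is a minimal sketch, assuming lal.gpstime provides the gps_to_utc / utc_to_gps conversions used by the script; it yields one tick per UTC midnight spanning the data, which is why the ticks may not line up exactly with the plotted points.

from lal import gpstime

def midnight_ticks(start_gps, end_gps):
    # Mirrors the loop above: round each day's GPS time down to the
    # date (midnight UTC) and convert back to GPS for the tick position.
    ticks, labels = [], []
    t = start_gps
    while t < end_gps:
        date = gpstime.gps_to_utc(t).date()   # strip time-of-day
        labels.append(date.strftime("%Y-%m-%d"))
        ticks.append(gpstime.utc_to_gps(date).gpsSeconds)
        t += 86400  # step one day
    return ticks, labels

# Example: roughly three days of data (GPS times are arbitrary).
ticks, labels = midnight_ticks(1368921600, 1369180800)
print(list(zip(ticks, labels)))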