Simplify definition of strain buffer length
titodalcanton committed Apr 10, 2024
1 parent 52c9445 commit 80ef644
Showing 2 changed files with 30 additions and 16 deletions.
bin/pycbc_live: 32 changes (23 additions, 9 deletions)
@@ -104,6 +104,7 @@ class LiveEventManager(object):
self.padata = livepau.PAstroData(args.p_astro_spec, args.bank_file)
self.use_date_prefix = args.day_hour_output_prefix
self.ifar_upload_threshold = args.ifar_upload_threshold
self.pvalue_lookback_time = args.pvalue_lookback_time
self.pvalue_livetime = args.pvalue_combination_livetime
self.gracedb_server = args.gracedb_server
self.gracedb_search = args.gracedb_search
@@ -221,7 +222,8 @@ class LiveEventManager(object):
self.data_readers[ifo],
self.bank,
template_id,
coinc_times
coinc_times,
lookback=self.pvalue_lookback_time
)
if pvalue_info is None:
continue
@@ -802,6 +804,20 @@ class LiveEventManager(object):
store_psd[ifo].save(fname, group='%s/psd' % ifo)


def check_max_length(waveforms):
"""Check that the `--max-length` option is sufficient to accomodate the
longest template in the bank and the PSD estimation options.
"""
lengths = numpy.array([1.0 / wf.delta_f for wf in waveforms])
psd_len = args.psd_segment_length * (args.psd_samples // 2 + 1)
max_length = max(lengths.max() + args.pvalue_lookback_time, psd_len)
if max_length > args.max_length:
raise ValueError(
'--max-length is too short for this template bank. '
f'Use at least {max_length}.'
)


parser = argparse.ArgumentParser(description=__doc__)
pycbc.waveform.bank.add_approximant_arg(parser)
parser.add_argument('--verbose', action='store_true')
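
For reference, the requirement enforced by the new check_max_length function above reduces to a single comparison. Below is a minimal standalone sketch of that arithmetic; the template spacings, lookback and PSD settings are made-up stand-ins for a bank and for the --pvalue-lookback-time, --psd-segment-length and --psd-samples options, not values from a real configuration.

import numpy

# Hypothetical inputs for illustration only.
template_delta_fs = numpy.array([1.0 / 256, 1.0 / 128, 1.0 / 64])  # Hz
pvalue_lookback_time = 150  # seconds, the default of --pvalue-lookback-time
psd_segment_length = 4      # seconds, stand-in for --psd-segment-length
psd_samples = 30            # stand-in for --psd-samples

template_lengths = 1.0 / template_delta_fs  # template durations in seconds
psd_len = psd_segment_length * (psd_samples // 2 + 1)
required = max(template_lengths.max() + pvalue_lookback_time, psd_len)
print(f'--max-length must be at least {required:.0f} s')  # 406 s with these numbers
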
@@ -962,6 +978,10 @@ parser.add_argument('--enable-background-estimation', default=False, action='sto
parser.add_argument('--ifar-double-followup-threshold', type=float, required=True,
help='Inverse-FAR threshold to followup double coincs with '
'additional detectors')
parser.add_argument('--pvalue-lookback-time', type=float, default=150,
metavar='SECONDS',
help='Lookback time for the calculation of the p-value in '
'followup detectors.')
parser.add_argument('--pvalue-combination-livetime', type=float, required=True,
help="Livetime used for p-value combination with followup "
"detectors, in years")
@@ -1110,13 +1130,10 @@ with ctx:
print(e)
exit()

maxlen = args.psd_segment_length * (args.psd_samples // 2 + 1)
if evnt.rank > 0:
bank.table.sort(order='mchirp')
waveforms = list(bank[evnt.rank-1::evnt.size-1])
lengths = numpy.array([1.0 / wf.delta_f for wf in waveforms])
psd_len = args.psd_segment_length * (args.psd_samples // 2 + 1)
maxlen = max(lengths.max(), psd_len)
check_max_length(waveforms)
mf = LiveBatchMatchedFilter(waveforms, args.snr_threshold,
args.chisq_bins, sg_chisq,
snr_abort_threshold=args.snr_abort_threshold,
@@ -1138,11 +1155,8 @@ with ctx:
# Initialize the data readers for all detectors. For rank 0, we need data
# from all detectors, including the localization-only ones. For higher
# ranks, we only need the detectors that can generate candidates.
if args.max_length is not None:
maxlen = args.max_length
maxlen = int(maxlen)
data_reader = {
ifo: StrainBuffer.from_cli(ifo, args, maxlen)
ifo: StrainBuffer.from_cli(ifo, args, args.max_length)
for ifo in (evnt.ifos if evnt.rank == 0 else evnt.trigg_ifos)
}
evnt.data_readers = data_reader
pycbc/strain/strain.py: 14 changes (7 additions, 7 deletions)
@@ -1420,8 +1420,8 @@ def execute_cached_ifft(*args, **kwargs):

class StrainBuffer(pycbc.frame.DataBuffer):
def __init__(self, frame_src, channel_name, start_time,
max_buffer=512,
sample_rate=4096,
max_buffer,
sample_rate,
low_frequency_cutoff=20,
highpass_frequency=15.0,
highpass_reduction=200.0,
@@ -1462,9 +1462,9 @@ def __init__(self, frame_src, channel_name, start_time,
Name of the channel to read from the frame files
start_time:
Time to start reading from.
max_buffer: {int, 512}, Optional
Length of the buffer in seconds
sample_rate: {int, 2048}, Optional
max_buffer: int
Length of the strain buffer in seconds.
sample_rate: int, Optional
Rate in Hz to sample the data.
low_frequency_cutoff: {float, 20}, Optional
The low frequency cutoff to use for inverse spectrum truncation
@@ -1536,7 +1536,7 @@ def __init__(self, frame_src, channel_name, start_time,
filesystem.
"""
super(StrainBuffer, self).__init__(frame_src, channel_name, start_time,
max_buffer=32,
max_buffer=max_buffer,
force_update_cache=force_update_cache,
increment_update_cache=increment_update_cache)

@@ -1988,7 +1988,7 @@ def from_cli(cls, ifo, args, maxlen):
strain_channel = ':'.join([ifo, args.channel_name[ifo]])

return cls(frame_src, strain_channel,
args.start_time, max_buffer=maxlen * 2,
args.start_time, max_buffer=maxlen,
state_channel=state_channel,
data_quality_channel=dq_channel,
idq_channel=idq_channel,
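
Taken together with the __init__ hunks above, the value of --max-length now flows unchanged through from_cli and into the parent DataBuffer, where the old code doubled it in from_cli and passed a fixed 32 s to the super() call. A toy sketch of the simplified flow, using illustrative stand-in classes rather than the real pycbc.frame.DataBuffer and StrainBuffer (which take many more arguments):

class ToyDataBuffer:
    def __init__(self, max_buffer):
        self.max_buffer = max_buffer  # seconds of data kept in memory

class ToyStrainBuffer(ToyDataBuffer):
    def __init__(self, max_buffer):
        # the requested length is now passed straight through
        super().__init__(max_buffer=max_buffer)

    @classmethod
    def from_cli(cls, args):
        return cls(max_buffer=args.max_length)

class ToyArgs:
    max_length = 512  # seconds, a made-up value of --max-length

print(ToyStrainBuffer.from_cli(ToyArgs()).max_buffer)  # -> 512
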
