From 21e13dfe646e631b6a36b583f1641c518b3335b4 Mon Sep 17 00:00:00 2001
From: Gareth S Cabourn Davies
Date: Wed, 17 Jan 2024 14:43:02 +0000
Subject: [PATCH] Scripts for dealing with preparation of offline gracedb upload files (#4534)

* Scripts for dealing with preparation of offline gracedb upload files

* Update bin/all_sky_search/pycbc_make_bayestar_skymap

Co-authored-by: Tito Dal Canton

* reinstate SNR timeseries being saved into HDF

* TDC comments

* TDC comments 2

* Remove unneeded import

---------

Co-authored-by: Tito Dal Canton
Co-authored-by: Tito Dal Canton
---
 bin/all_sky_search/pycbc_make_bayestar_skymap |  85 +++++++
 .../pycbc_prepare_xml_for_gracedb             | 238 ++++++++++++++++++
 .../pycbc_upload_single_event_to_gracedb      | 182 ++++++++++++++
 pycbc/io/hdf.py                               |   4 +
 pycbc/io/live.py                              |  31 +--
 pycbc/results/__init__.py                     |   1 +
 pycbc/results/snr.py                          |  73 ++++++
 7 files changed, 595 insertions(+), 19 deletions(-)
 create mode 100644 bin/all_sky_search/pycbc_make_bayestar_skymap
 create mode 100755 bin/all_sky_search/pycbc_prepare_xml_for_gracedb
 create mode 100755 bin/all_sky_search/pycbc_upload_single_event_to_gracedb
 create mode 100644 pycbc/results/snr.py

diff --git a/bin/all_sky_search/pycbc_make_bayestar_skymap b/bin/all_sky_search/pycbc_make_bayestar_skymap
new file mode 100644
index 00000000000..7c4d7c3f255
--- /dev/null
+++ b/bin/all_sky_search/pycbc_make_bayestar_skymap
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+"""
+Simple wrapper around bayestar-localize-coincs to get sky localization for a
+specific compact binary merger event.
+
+Uses an XML containing SNR timeseries and PSD and calls BAYESTAR to produce a
+sky localization.
+
+The wrapper needs to exist as the XML format doesn't allow us to indicate
+the low-frequency cutoff or waveform used. We also want to be able to control
+the output filename, which is not possible in bayestar-localize-coincs.
+
+Unrecognised options will be passed straight to the bayestar subprocess.
+"""
+
+import argparse
+import subprocess
+import shutil
+import logging
+import os
+import glob
+import sys
+import random
+import tempfile
+import pycbc.version
+from pycbc import init_logging
+
+parser = argparse.ArgumentParser(description=__doc__)
+parser.add_argument('--version', action="version",
+                    version=pycbc.version.git_verbose_msg)
+parser.add_argument('--verbose', action='count',
+                    help="Make the logging increasingly more verbose")
+parser.add_argument('--bayestar-executable',
+                    help="The bayestar-localize-coinc executable to be run. "
+                         "If not given, will use whatever is available in "
+                         "the current environment.")
+parser.add_argument('--event-xml', required=True,
+                    help="XML file containing event information, SNR "
+                         "timeseries and PSD to pass to bayestar")
+parser.add_argument('--waveform', default='TaylorF2',
+                    help="Waveform used in the matched-filtering "
+                         "to generate the SNR timeseries.")
+parser.add_argument('--low-frequency-cutoff', type=float, default=20,
+                    help="Low-frequency cutoff used in the matched-filtering "
+                         "to generate the SNR timeseries")
+parser.add_argument('--output-file', required=True,
+                    help="Filename to output the fits file to.")
+args, unknown = parser.parse_known_args()
+
+# Default logging is set higher than normal for this job
+logging_level = args.verbose + 1 if args.verbose else None
+init_logging(logging_level)
+logging.info("Starting")
+
+bayestar_exe = args.bayestar_executable or 'bayestar-localize-coincs'
+
+tmpdir = tempfile.mkdtemp()
+
+# Set up the command to pass to bayestar.
+# The XML file path being passed twice is a legacy requirement, not a mistake.
+cmd = [bayestar_exe,
+       args.event_xml,
+       args.event_xml,
+       '--waveform', args.waveform,
+       '--f-low', str(args.low_frequency_cutoff),
+       '-o', tmpdir]
+
+# Pass any unknown options straight to the subprocess
+cmd += unknown
+
+logging.info("Running %s", ' '.join(cmd))
+subprocess.check_output(cmd)
+
+# Find the fits file in the temporary directory:
+# It would be nice to do this better! - maybe use the input xml
+# file to find the number which is going to be used?
+fits_filenames = glob.glob(os.path.join(tmpdir, '*.fits*'))
+if len(fits_filenames) != 1:
+    raise ValueError(f'Expected exactly one FITS file after running BAYESTAR, got {len(fits_filenames)}')
+
+logging.info("Moving output to %s", args.output_file)
+shutil.move(fits_filenames[0], args.output_file)
+
+logging.info("Done")
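
A rough sketch of driving the wrapper from Python, with hypothetical file names; anything the wrapper does not recognise is forwarded to bayestar-localize-coincs via parse_known_args, as described in the docstring above:

    import subprocess

    # Hypothetical inputs: an event XML written by pycbc_prepare_xml_for_gracedb
    # and the desired output path for the FITS sky map
    cmd = [
        "pycbc_make_bayestar_skymap",
        "--event-xml", "H1L1-EVENT_1.xml",
        "--output-file", "H1L1-EVENT_1_skymap.fits",
        "--low-frequency-cutoff", "30",
        "--waveform", "TaylorF2",
        # extra BAYESTAR options could be appended here; they are passed through
    ]
    subprocess.check_call(cmd)
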
diff --git a/bin/all_sky_search/pycbc_prepare_xml_for_gracedb b/bin/all_sky_search/pycbc_prepare_xml_for_gracedb
new file mode 100755
index 00000000000..0baf6cb19dc
--- /dev/null
+++ b/bin/all_sky_search/pycbc_prepare_xml_for_gracedb
@@ -0,0 +1,238 @@
+#!/usr/bin/env python
+
+# Copyright (C) 2015-2023 Ian Harry, Gareth Cabourn Davies
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 3 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+"""
+Take a coinc xml file containing multiple events and prepare one of them
+for upload to gracedb.
+"""
+
+import argparse
+import logging
+import numpy as np
+import h5py
+import matplotlib
+matplotlib.use('agg')
+
+import pycbc
+import lal
+import lal.series
+from ligo.lw import lsctables
+from ligo.lw import utils as ligolw_utils
+from ligo.segments import segment, segmentlist
+from pycbc.io.ligolw import (
+    LIGOLWContentHandler,
+    snr_series_to_xml,
+)
+from pycbc.psd import interpolate
+from pycbc.types import FrequencySeries, TimeSeries, load_timeseries
+from pycbc.types import MultiDetOptionAction
+from pycbc.results import generate_asd_plot, generate_snr_plot
+
+parser = argparse.ArgumentParser(description=__doc__)
+parser.add_argument("--verbose", action='count',
+                    help="Increase logging level, default=info")
+parser.add_argument("--psd-files", nargs='+', required=True,
+                    help='HDF file(s) containing the PSDs to upload')
+parser.add_argument("--snr-timeseries", nargs='+', required=True,
+                    help='HDF file(s) containing the SNR timeseries to upload')
+parser.add_argument('--input-file', required=True, type=str,
+                    help='Input LIGOLW XML file of coincidences.')
+parser.add_argument('--event-id', type=int, required=True,
+                    help='ID for the event being prepared.')
+parser.add_argument('--output-file',
+                    help="Output filename for the locally stored XML.")
+parser.add_argument('--snr-timeseries-plot',
+                    help="Output filename for the plot of SNR timeseries. "
+                         "Must be a plot filetype as used by matplotlib.")
+parser.add_argument('--psd-plot',
+                    help="Output filename for the plot of the PSD. "
+                         "Must be a plot filetype as used by matplotlib.")
+parser.add_argument('--channel-name', action=MultiDetOptionAction,
+                    required=True,
+                    help="Channel name for adding to the uploaded single "
+                         "inspiral table.")
+parser.add_argument('--delta-f', type=float, default=0.25,
+                    help="Frequency spacing of the PSD to be uploaded, Hz. "
+                         "Default 0.25")
+
+args = parser.parse_args()
+
+if args.verbose:
+    args.verbose += 1
+else:
+    args.verbose = 1
+pycbc.init_logging(args.verbose)
+
+xmldoc = ligolw_utils.load_filename(args.input_file,
+                                    contenthandler=LIGOLWContentHandler)
+
+class psd_segment(segment):
+    def __new__(cls, psd, *args):
+        return segment.__new__(cls, *args)
+    def __init__(self, psd, *args):
+        self.psd = psd
+
+def read_psds(psd_files):
+    logging.info("Reading PSDs")
+    psds = {}
+
+    for psd_file in psd_files:
+        (ifo, group), = h5py.File(psd_file, "r").items()
+        psd = [group["psds"][str(i)] for i in range(len(group["psds"].keys()))]
+        psds[ifo] = segmentlist(psd_segment(*segargs) for segargs in zip(
+            psd, group["start_time"], group["end_time"]))
+    return psds
+
+psds = read_psds(args.psd_files)
+
+def read_snr_timeseries(snr_timeseries_files):
+    """
+    Get information from the single_template snr timeseries files
+    """
+    logging.info("Reading SNR timeseries")
+    snr_timeseries = {}
+    for snr_filename in snr_timeseries_files:
+        with h5py.File(snr_filename, 'r') as snr_f:
+            ifo = snr_f.attrs['ifo']
+            time = snr_f.attrs['event_time']
+        snr_timeseries[(ifo, time)] = load_timeseries(snr_filename, 'snr')
+    return snr_timeseries

+snr_timeseries = read_snr_timeseries(args.snr_timeseries)
+
+coinc_table = lsctables.CoincTable.get_table(xmldoc)
+coinc_inspiral_table = lsctables.CoincInspiralTable.get_table(xmldoc)
+coinc_event_map_table = lsctables.CoincMapTable.get_table(xmldoc)
+sngl_inspiral_table = lsctables.SnglInspiralTable.get_table(xmldoc)
+
+xmldoc.childNodes[-1].removeChild(sngl_inspiral_table)
+xmldoc.childNodes[-1].removeChild(coinc_event_map_table)
+xmldoc.childNodes[-1].removeChild(coinc_inspiral_table)
+xmldoc.childNodes[-1].removeChild(coinc_table)
+
+# Find the coinc_table entry corresponding to the desired event ID
+for event in coinc_table:
+    coinc_event_id = event.coinc_event_id
+    if args.event_id == coinc_event_id:
+        # This is the event we want
+        break
+else:
+    raise ValueError(f"event {args.event_id} not found in coinc table")
+
+coinc_event_table_curr = lsctables.New(lsctables.CoincTable)
+coinc_event_table_curr.append(event)
+coinc_inspiral_table_curr = lsctables.New(lsctables.CoincInspiralTable)
+coinc_event_map_table_curr = lsctables.New(lsctables.CoincMapTable)
+sngl_inspiral_table_curr = lsctables.New(lsctables.SnglInspiralTable)
+
+for coinc_insp in coinc_inspiral_table:
+    if coinc_insp.coinc_event_id == event.coinc_event_id:
+        coinc_inspiral_table_curr.append(coinc_insp)
+
+time = coinc_inspiral_table_curr[0].end_time \
+    + coinc_inspiral_table_curr[0].end_time_ns * 1e-9
+
+sngl_ids = []
+for coinc_map in coinc_event_map_table:
+    if coinc_map.coinc_event_id == event.coinc_event_id:
+        coinc_event_map_table_curr.append(coinc_map)
+        sngl_ids.append(coinc_map.event_id)
+
+# Get the SNR timeseries and PSDs at the time of this event;
+# these may not have contributed a trigger
+
+# Check that the SNR timeseries correspond to the event
+# - allow 0.5s either side of the time
+snr_ts = {}
+for ifo, t in snr_timeseries.keys():
+    if abs(t - time) >= 0.5:
+        raise ValueError("SNR timeseries for IFO %s does not look like it "
+                         "corresponds to this event, event time %.3f, SNR timeseries is "
+                         "around time %.3f" % (ifo, time, t))
+    # Convert the SNR dict to be keyed on IFO only:
+    snr_ts[ifo] = snr_timeseries[(ifo, t)]
+
+# IFOs from SNR timeseries:
+psds_event = {}
+psddict = {}
+for ifo in snr_ts.keys():
+    psd = psds[ifo]
+
+    psd = psd[psd.find(time)].psd
+    # resample the psd to new spacing
+    psd_fs = FrequencySeries(psd, delta_f=psd.attrs["delta_f"],
+                             dtype=np.float64)
+    psd_fs = interpolate(psd_fs, args.delta_f)
+
+    psds_event[ifo] = psd
+    psddict[ifo] = psd_fs
+
+lal_psddict = {}
+sample_freqs = {}
+
+for sngl in sngl_inspiral_table:
+    if sngl.event_id not in sngl_ids:
+        continue
+    sngl_inspiral_table_curr.append(sngl)
+    sngl.channel = args.channel_name[sngl.ifo]
+
+    # Two versions of the PSD dictionary have useful info here
+    psd = psds_event[sngl.ifo]
+    psd_fs = psddict[sngl.ifo]
+
+    flow = psd.file.attrs['low_frequency_cutoff']
+    kmin = int(flow / args.delta_f)
+
+    fseries = lal.CreateREAL8FrequencySeries(
+        "psd",
+        lal.LIGOTimeGPS(int(psd.attrs["epoch"])),
+        kmin * args.delta_f,
+        args.delta_f,
+        lal.StrainUnit**2 / lal.HertzUnit,
+        len(psd_fs) - kmin)
+    fseries.data.data = psd_fs[kmin:] / np.square(pycbc.DYN_RANGE_FAC)
+    lal_psddict[sngl.ifo] = fseries
+
+    snr_series_to_xml(snr_ts[sngl.ifo], xmldoc, sngl.event_id)
+
+xmldoc.childNodes[-1].appendChild(coinc_event_table_curr)
+xmldoc.childNodes[-1].appendChild(coinc_inspiral_table_curr)
+xmldoc.childNodes[-1].appendChild(coinc_event_map_table_curr)
+xmldoc.childNodes[-1].appendChild(sngl_inspiral_table_curr)
+lal.series.make_psd_xmldoc(lal_psddict, xmldoc.childNodes[-1])
+
+ligolw_utils.write_filename(xmldoc, args.output_file)
+logging.info("Saved XML file %s", args.output_file)
+
+if args.psd_plot:
+    logging.info("Saving ASD plot %s", args.psd_plot)
+    generate_asd_plot(psddict, args.psd_plot)
+
+if args.snr_timeseries_plot:
+    triggers = {sngl.ifo: (sngl.end_time + sngl.end_time_ns * 1e-9, sngl.snr)
+                for sngl in sngl_inspiral_table_curr}
+    base_time = int(np.floor(time))
+    logging.info("Saving SNR plot %s", args.snr_timeseries_plot)
+    generate_snr_plot(
+        snr_ts,
+        args.snr_timeseries_plot,
+        triggers,
+        base_time
+    )
+
+logging.info('Done!')
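
For orientation, the script above expects each --snr-timeseries HDF file to carry `ifo` and `event_time` attributes at the file level, with the complex SNR stored under an `snr` group (that is what `read_snr_timeseries` reads and what `load_timeseries(..., 'snr')` loads back). A rough sketch of writing such a file with made-up numbers, assuming the usual pycbc.types API:

    import h5py
    import numpy as np
    from pycbc.types import TimeSeries

    # Made-up complex SNR time series centred near a fictitious event time
    snr = TimeSeries(np.zeros(4096, dtype=np.complex64),
                     delta_t=1.0 / 2048, epoch=1187008882)
    snr.save('H1-snr_series.hdf', group='snr')
    with h5py.File('H1-snr_series.hdf', 'a') as f:
        f.attrs['ifo'] = 'H1'
        f.attrs['event_time'] = 1187008882.4
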
" + "Must be a plot filetype as used by matplotlib.") +parser.add_argument('--psd-plot', + help="Output filename for the plot of the PSD. " + "Must be a plot filetype as used by matplotlib.") +parser.add_argument('--channel-name', action=MultiDetOptionAction, + required=True, + help="Channel name for adding to the uploaded single " + "inspiral table.") +parser.add_argument('--delta-f', type=float, default=0.25, + help="Frequency spacing of the PSD to be uploaded, Hz. " + "Default 0.25") + +args = parser.parse_args() + +if args.verbose: + args.verbose += 1 +else: + args.verbose = 1 +pycbc.init_logging(args.verbose) + +xmldoc = ligolw_utils.load_filename(args.input_file, + contenthandler=LIGOLWContentHandler) + +class psd_segment(segment): + def __new__(cls, psd, *args): + return segment.__new__(cls, *args) + def __init__(self, psd, *args): + self.psd = psd + +def read_psds(psd_files): + logging.info("Reading PSDs") + psds = {} + + for psd_file in psd_files: + (ifo, group), = h5py.File(psd_file, "r").items() + psd = [group["psds"][str(i)] for i in range(len(group["psds"].keys()))] + psds[ifo] = segmentlist(psd_segment(*segargs) for segargs in zip( + psd, group["start_time"], group["end_time"])) + return psds + +psds = read_psds(args.psd_files) + +def read_snr_timeseries(snr_timeseries_files): + """ + Get information from the single_template snr timeseries files + """ + logging.info("Reading SNR timeseries") + snr_timeseries = {} + for snr_filename in snr_timeseries_files: + with h5py.File(snr_filename, 'r') as snr_f: + ifo = snr_f.attrs['ifo'] + time = snr_f.attrs['event_time'] + snr_timeseries[(ifo, time)] = load_timeseries(snr_filename, 'snr') + return snr_timeseries + +snr_timeseries = read_snr_timeseries(args.snr_timeseries) + +coinc_table = lsctables.CoincTable.get_table(xmldoc) +coinc_inspiral_table = lsctables.CoincInspiralTable.get_table(xmldoc) +coinc_event_map_table = lsctables.CoincMapTable.get_table(xmldoc) +sngl_inspiral_table = lsctables.SnglInspiralTable.get_table(xmldoc) + +xmldoc.childNodes[-1].removeChild(sngl_inspiral_table) +xmldoc.childNodes[-1].removeChild(coinc_event_map_table) +xmldoc.childNodes[-1].removeChild(coinc_inspiral_table) +xmldoc.childNodes[-1].removeChild(coinc_table) + +# Find the coinc_table entry corresponding to the desired event ID +for event in coinc_table: + coinc_event_id = event.coinc_event_id + if args.event_id == coinc_event_id: + # This is the event we want + break +else: + raise ValueError(f"event {args.event_id} not found in coinc table") + +coinc_event_table_curr = lsctables.New(lsctables.CoincTable) +coinc_event_table_curr.append(event) +coinc_inspiral_table_curr = lsctables.New(lsctables.CoincInspiralTable) +coinc_event_map_table_curr = lsctables.New(lsctables.CoincMapTable) +sngl_inspiral_table_curr = lsctables.New(lsctables.SnglInspiralTable) + +for coinc_insp in coinc_inspiral_table: + if coinc_insp.coinc_event_id == event.coinc_event_id: + coinc_inspiral_table_curr.append(coinc_insp) + +time = coinc_inspiral_table_curr[0].end_time \ + + coinc_inspiral_table_curr[0].end_time_ns * 1e-9 + +sngl_ids = [] +for coinc_map in coinc_event_map_table: + if coinc_map.coinc_event_id == event.coinc_event_id: + coinc_event_map_table_curr.append(coinc_map) + sngl_ids.append(coinc_map.event_id) + +# Get the SNR timeseries and PSDs at the time of this event +# these may not have contributed a trigger + +# Check that the SNR timeseries correspond to the event +# - allow 0.5s either side of the time +snr_ts = {} +for ifo, t in snr_timeseries.keys(): + if 
diff --git a/pycbc/io/hdf.py b/pycbc/io/hdf.py
index 2cd0dd1e450..754c63bf663 100644
--- a/pycbc/io/hdf.py
+++ b/pycbc/io/hdf.py
@@ -963,6 +963,10 @@ def to_coinc_xml_object(self, file_name):
                 val = sngl_col_vals[name][ifo][0][idx]
                 if name == 'end_time':
                     sngl.end = LIGOTimeGPS(val)
+                elif name == 'chisq':
+                    # Use reduced chisquared to be consistent with Live
+                    dof = 2. * sngl_col_vals['chisq_dof'][ifo][0][idx] - 2.
+                    sngl.chisq = val / dof
                 else:
                     setattr(sngl, name, val)
             for name in bank_col_names:
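
The chi-squared stored by the offline search is unreduced; with n frequency bins the PyCBC power chi-squared has 2n - 2 degrees of freedom, which is what the `dof` line above computes before dividing. A worked example with made-up numbers:

    chisq = 36.0     # unreduced chi-squared, as stored in the offline HDF files
    chisq_dof = 10   # number of chi-squared bins for this trigger
    dof = 2. * chisq_dof - 2.    # 18 degrees of freedom
    reduced_chisq = chisq / dof  # 2.0, the value written to the sngl_inspiral row
    print(reduced_chisq)
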
+""" + +import os +import sys +import argparse +import logging +from ligo.gracedb.rest import GraceDb +import numpy as np + +import pycbc +import lal +from pycbc.io.live import gracedb_tag_with_version + +parser = argparse.ArgumentParser(description=__doc__) +parser.add_argument('--xml-file-for-upload', required=True, type=str, + help='LIGOLW XML file containing the event.') +parser.add_argument('--log-message', type=str, metavar='MESSAGE', + help='Add a log entry to each upload with the given message') +parser.add_argument('--testing', action="store_true", default=False, + help="Upload event to the TEST group of gracedb.") +parser.add_argument('--production-server', action="store_true", default=False, + help="Upload event to production graceDB. If not given " + "events will be uploaded to playground server.") +parser.add_argument('--search-tag', default='AllSky', + help="Specify the search tag. Default: AllSky") +parser.add_argument('--snr-timeseries-plot', + help="SNR timeseries plot to be uploaded with the event.") +parser.add_argument('--asd-plot', + help="ASD plot to be uploaded with the event.") +parser.add_argument('--skymap-plot', + help="Skymap plot to be uploaded with the event.") +parser.add_argument('--skymap-fits-file', + help="Skymap .fits file to be uploaded with the event.") +parser.add_argument('--source-probabilities', + help="Source probabilities file to be uploaded with the " + "event.") +parser.add_argument('--source-probabilities-plot', + help="Source probabilities plot to be uploaded with the " + "event.") +parser.add_argument('--labels', nargs='+', + help="Labels to add to the event in GraceDB") + +args = parser.parse_args() + +pycbc.init_logging(logging.INFO) +# Make the scitokens logger a little quieter +# (it is called through GraceDB) +logging.getLogger('scitokens').setLevel(logging.root.level + 10) + +if args.production_server: + gracedb = GraceDb() +else: + gracedb = GraceDb(service_url='https://gracedb-playground.ligo.org/api/') + +labels = [l.upper() for l in args.labels] +allowed_labels = gracedb.allowed_labels + +if set(labels) - set(allowed_labels): + err_msg = "One or more supplied labels is not available on the server. " + err_msg += f"Supplied {','.join(labels)}, allowed " + err_msg += f"{','.join(allowed_labels)}." + raise RuntimeError(err_msg) + +group_tag = 'Test' if args.testing else 'CBC' +r = gracedb.create_event( + group_tag, + 'pycbc', + args.xml_file_for_upload, + filecontents=open(args.xml_file_for_upload, "rb").read(), + search=args.search_tag, + offline=True, + labels=labels +).json() + +logging.info("Uploaded event %s.", r["graceid"]) + +# add info for tracking code version +gracedb_tag_with_version(gracedb, r['graceid']) + +# document the absolute path to the input file +input_file_str = 'Candidate uploaded from ' \ + + os.path.abspath(args.xml_file_for_upload) +gracedb.write_log(r['graceid'], input_file_str) + +# document the command line used in the event log +log_str = 'Upload command: ' + ' '.join(sys.argv) + +# add the custom log message, if provided +if args.log_message is not None: + log_str += '. ' + args.log_message + +gracedb.write_log( + r['graceid'], + log_str, + tag_name=['analyst_comments'] +) + + +def upload_file(upload_filename, displayname, comment, tag): + """ + Helper function to upload files associated with the event. 
diff --git a/pycbc/results/__init__.py b/pycbc/results/__init__.py
index a12111a2395..63e0aca882b 100644
--- a/pycbc/results/__init__.py
+++ b/pycbc/results/__init__.py
@@ -4,6 +4,7 @@
 from pycbc.results.color import *
 from pycbc.results.plot import *
 from pycbc.results.psd import *
+from pycbc.results.snr import *
 from pycbc.results.layout import *
 from pycbc.results.dq import *
 from pycbc.results.str_utils import *
diff --git a/pycbc/results/snr.py b/pycbc/results/snr.py
new file mode 100644
index 00000000000..d96476d6282
--- /dev/null
+++ b/pycbc/results/snr.py
@@ -0,0 +1,73 @@
+# Copyright (C) 2022
+# Tito Dal Canton, Gareth Cabourn Davies, Ian Harry
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 3 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+
+
+#
+# =============================================================================
+#
+#                                   Preamble
+#
+# =============================================================================
+#
+"""
+Module to generate SNR figures
+"""
+import pylab as pl
+from pycbc.results import ifo_color
+
+
+def generate_snr_plot(snrdict, output_filename, triggers, ref_time):
+    """
+    Generate an SNR timeseries plot as used for upload to GraceDB.
+
+    Parameters
+    ----------
+    snrdict : dictionary
+        A dictionary keyed on IFO containing the SNR
+        TimeSeries objects
+    output_filename : string
+        The filename to which the plot will be saved
+    triggers : dictionary of tuples
+        A dictionary keyed on IFO, containing (trigger time, trigger SNR)
+    ref_time : number, GPS seconds
+        Reference time which will be used as the zero point of the plot;
+        it will be truncated to an integer number of GPS seconds
+
+    Returns
+    -------
+    None
+    """
+    pl.figure()
+    ref_time = int(ref_time)
+    for ifo in sorted(snrdict):
+        curr_snrs = snrdict[ifo]
+
+        pl.plot(curr_snrs.sample_times - ref_time, abs(curr_snrs),
+                c=ifo_color(ifo), label=ifo)
+        if ifo in triggers:
+            pl.plot(triggers[ifo][0] - ref_time,
+                    triggers[ifo][1], marker='x', c=ifo_color(ifo))
+
+    pl.legend()
+    pl.xlabel(f'GPS time from {ref_time:d} (s)')
+    pl.ylabel('SNR')
+    pl.savefig(output_filename)
+    pl.close()
+
+
+__all__ = ["generate_snr_plot"]
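
A self-contained usage sketch of the new helper, with synthetic data standing in for the matched-filter output (detector names, values and sample rate are arbitrary):

    import numpy as np
    from pycbc.types import TimeSeries
    from pycbc.results import generate_snr_plot

    ref = 1187008882
    # Fake complex SNR series for two detectors, one second long around ref
    snrs = {
        ifo: TimeSeries(np.random.normal(size=2048).astype(np.complex64),
                        delta_t=1.0 / 2048, epoch=ref - 0.5)
        for ifo in ('H1', 'L1')
    }
    triggers = {'H1': (ref + 0.010, 9.3), 'L1': (ref + 0.013, 7.8)}
    generate_snr_plot(snrs, 'snr_timeseries.png', triggers, ref)

The same call now serves both PyCBC Live uploads (pycbc/io/live.py) and the offline preparation script, which is the point of factoring it out.
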