From d44e07b1f4c395f37363d862aff9195c9f2b3eb4 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Wed, 19 Jan 2022 21:04:43 +0000 Subject: [PATCH 001/102] add calculation of trigger time and cutting trace to correct length --- NuRadioReco/utilities/noise.py | 88 ++++++++++++++++++++-------------- 1 file changed, 53 insertions(+), 35 deletions(-) diff --git a/NuRadioReco/utilities/noise.py b/NuRadioReco/utilities/noise.py index c78d579ac..d35666e71 100644 --- a/NuRadioReco/utilities/noise.py +++ b/NuRadioReco/utilities/noise.py @@ -2,9 +2,11 @@ from NuRadioReco.modules import channelGenericNoiseAdder from NuRadioReco.utilities import units, fft from NuRadioReco.modules.trigger.highLowThreshold import get_high_low_triggers -from NuRadioReco.detector import detector +from NuRadioReco.detector import generic_detector as detector from scipy import constants import datetime +import scipy +import scipy.signal import copy import time @@ -31,10 +33,11 @@ def rolled_sum_roll(traces, rolling): # assume first trace always has no rolling sumtr = traces[0].copy() - for i in range(1,len(traces)): + for i in range(1, len(traces)): sumtr += np.roll(traces[i], rolling[i]) return sumtr + def rolling_indices(traces, rolling): """ pre calculates rolling index array for rolled sum via take @@ -53,6 +56,7 @@ def rolling_indices(traces, rolling): rolling_indices.append(np.roll(idx, roll)) return np.array(rolling_indices).astype(int) + def rolled_sum_take(traces, rolling_indices): """ calculates rolled sum via np.take @@ -72,10 +76,11 @@ def rolled_sum_take(traces, rolling_indices): # assume first trace always has no rolling sumtr = traces[0].copy() - for i in range(1,len(traces)): + for i in range(1, len(traces)): sumtr += np.take(traces[i], rolling_indices[i]) return sumtr + def rolled_sum_slicing(traces, rolling): """ calculates rolled sum via slicing @@ -95,7 +100,7 @@ def rolled_sum_slicing(traces, rolling): # assume first trace always has no rolling sumtr = traces[0].copy() - for i in range(1,len(traces)): + for i in range(1, len(traces)): r = rolling[i] if r > 0: sumtr[:-r] += traces[i][r:] @@ -108,8 +113,6 @@ def rolled_sum_slicing(traces, rolling): return sumtr - - class thermalNoiseGenerator(): def __init__(self, n_samples, sampling_rate, Vrms, threshold, time_coincidence, n_majority, time_coincidence_majority, @@ -215,7 +218,8 @@ class thermalNoiseGeneratorPhasedArray(): def __init__(self, detector_filename, station_id, triggered_channels, Vrms, threshold, ref_index, - noise_type="rayleigh"): + noise_type="rayleigh", log_level=logging.WARNING, + pre_trigger_time=100 * units.ns, trace_length=512 * units.ns): """ Efficient algorithms to generate thermal noise fluctuations that fulfill a phased array trigger @@ -239,16 +243,23 @@ def __init__(self, detector_filename, station_id, triggered_channels, the type of the noise, can be * "rayleigh" (default) * "noise" + pre_trigger_time: float + the time in the trace before the trigger happens + trace_length: float + the total trace length """ + logger.setLevel(log_level) self.debug = False self.max_amp = 0 self.upsampling = 2 - self.det = detector.Detector(json_filename=detector_filename) + self.det = detector.GenericDetector(json_filename=detector_filename) self.det.update(datetime.datetime(2018, 10, 1)) self.n_samples = self.det.get_number_of_samples(station_id, triggered_channels[0]) # assuming same settings for all channels self.sampling_rate = self.det.get_sampling_frequency(station_id, triggered_channels[0]) + self.pre_trigger_bins = int(pre_trigger_time 
* self.sampling_rate) + self.n_samples_trigger = int(trace_length * self.sampling_rate) det_channel = self.det.get_channel(station_id, triggered_channels[0]) self.adc_n_bits = det_channel["adc_nbits"] self.adc_noise_n_bits = det_channel["adc_noise_nbits"] @@ -280,7 +291,6 @@ def __init__(self, detector_filename, station_id, triggered_channels, roll = np.array(np.round(np.array(delays) * self.sampling_rate * self.upsampling)).astype(int) self.beam_time_delays[iBeam] = roll - print(self.beam_time_delays) self.Vrms = Vrms self.threshold = threshold self.noise_type = noise_type @@ -297,7 +307,7 @@ def __init__(self, detector_filename, station_id, triggered_channels, passband=[0 * units.MHz, 220 * units.MHz], filter_type='cheby1', order=7, rp=0.1) self.norm = np.trapz(np.abs(self.filt) ** 2, self.ff) self.amplitude = (self.max_freq - self.min_freq) ** 0.5 / self.norm ** 0.5 * self.Vrms - print(f"Vrms = {self.Vrms:.2f}, noise amplitude = {self.amplitude:.2f}, bandwidth = {self.norm/units.MHz:.0f}MHz") + print(f"Vrms = {self.Vrms:.3g}V, noise amplitude = {self.amplitude:.3g}V, bandwidth = {self.norm/units.MHz:.0f}MHz") print(f"frequency range {self.min_freq/units.MHz}MHz - {self.max_freq/units.MHz}MHz") self.adc_ref_voltage = self.Vrms * (2 ** (self.adc_n_bits - 1) - 1) / (2 ** (self.adc_noise_n_bits - 1) - 1) @@ -312,15 +322,14 @@ def __init__(self, detector_filename, station_id, triggered_channels, self.sampling_rate * self.upsampling, self.amplitude, self.noise_type) - def __generation(self): """ separated trace generation part for PA noise trigger """ for iCh in range(self.n_channels): - #spec = self.noise.bandlimited_noise(self.min_freq, self.max_freq, self.n_samples * self.upsampling, + # spec = self.noise.bandlimited_noise(self.min_freq, self.max_freq, self.n_samples * self.upsampling, # self.sampling_rate * self.upsampling, # self.amplitude, self.noise_type, time_domain=False) - + # function that does not re-calculate parameters in each simulated trace spec = self.noise.bandlimited_noise_from_precalculated_parameters(self.noise_type, time_domain=False) spec *= self.filt @@ -328,7 +337,6 @@ def __generation(self): self._traces[iCh] = perfect_floor_comparator(trace, self.adc_n_bits, self.adc_ref_voltage) - def __phasing(self): """ separated phasing part for PA noise trigger """ @@ -349,33 +357,37 @@ def __triggering(self): self.max_amp = 0 # take square over entire array - coh_sum_squared = self._phased_traces**2 + coh_sum_squared = self._phased_traces ** 2 # bin the data into windows of length self.step and normalise to step length - reduced_array = np.add.reduceat(coh_sum_squared.T,np.arange(0,np.shape(coh_sum_squared)[1],self.step)).T / self.step + reduced_array = np.add.reduceat(coh_sum_squared.T, np.arange(0, np.shape(coh_sum_squared)[1], self.step)).T / self.step sliding_windows = [] # self.window can extend over multiple steps, # assuming self.window being an integer multiple of self.step the reduction sums over subsequent steps - steps_per_window = self.window//self.step + steps_per_window = self.window // self.step # better extend the array in order to also trigger on sum of last/first (matching a previous implementation) - extended_reduced_array = np.column_stack([reduced_array, reduced_array[:,0:steps_per_window]]) + extended_reduced_array = np.column_stack([reduced_array, reduced_array[:, 0:steps_per_window]]) for offset in range(steps_per_window): # sum over steps_per_window adjacent steps window_sum = np.add.reduceat(extended_reduced_array.T, np.arange(offset, 
np.shape(extended_reduced_array)[1], steps_per_window)).T / steps_per_window sliding_windows.append(window_sum) - #self.max_amp = max(np.array(sliding_windows).max(), self.max_amp) + # self.max_amp = max(np.array(sliding_windows).max(), self.max_amp) self.max_amp = np.array(sliding_windows).max() # check if trigger condition is fulfilled anywhere if self.max_amp > self.threshold: - # check in which beam the trigger condition was fulfilled - sliding_windows = np.concatenate(sliding_windows, axis=1) - triggered_beams = np.amax(sliding_windows, axis=1) > self.threshold - for iBeam, is_triggered in enumerate(triggered_beams): - # print out each beam that has triggered - if is_triggered: - logger.info(f"triggered at beam {iBeam}") + sliding_windows = np.array(sliding_windows) + tmp = np.argwhere(sliding_windows > self.threshold) + triggered_step = tmp[0][0] + triggered_bin = tmp[0][2] * (self.window + triggered_step * steps_per_window) if(self.debug): + # check in which beam the trigger condition was fulfilled + sliding_windows = np.concatenate(sliding_windows, axis=1) + triggered_beams = np.amax(sliding_windows, axis=1) > self.threshold + for iBeam, is_triggered in enumerate(triggered_beams): + # print out each beam that has triggered + if is_triggered: + logger.info(f"triggered at beam {iBeam}") import matplotlib.pyplot as plt fig, ax = plt.subplots(5, 1, sharex=True) for iCh in range(self.n_channels): @@ -384,8 +396,8 @@ def __triggering(self): ax[4].plot(self._phased_traces[iBeam]) fig.tight_layout() plt.show() - return True - return False + return True, triggered_bin + return False, None def __triggering_strided(self): """ separated trigger part for PA noise trigger using np.lib.stride_tricks.as_strided """ @@ -398,7 +410,7 @@ def __triggering_strided(self): coh_sum_windowed = np.lib.stride_tricks.as_strided(coh_sum_squared, (num_frames, self.window), (coh_sum_squared.strides[0] * self.step, coh_sum_squared.strides[0])) squared_mean = np.sum(coh_sum_windowed, axis=1) / self.window - #self.max_amp = max(squared_mean.max(), self.max_amp) + # self.max_amp = max(squared_mean.max(), self.max_amp) self.max_amp = max(squared_mean.max(), self.max_amp) if True in (squared_mean > self.threshold): logger.info(f"triggered at beam {iBeam}") @@ -414,7 +426,6 @@ def __triggering_strided(self): return True return False - def generate_noise(self, phasing_mode="slice", trigger_mode="binned_sum", debug=False): """ generates noise traces for all channels that will cause a high/low majority logic trigger @@ -447,7 +458,7 @@ def generate_noise(self, phasing_mode="slice", trigger_mode="binned_sum", debug= while True: counter += 1 if(counter % 1000 == 0): - logger.info(f"{counter:d}, {self.max_amp:.2f}, threshold = {self.threshold:.2f}") + logger.info(f"{counter:d}, {self.max_amp:.2g}, threshold = {self.threshold:.2g}") # some printout for profiling logger.info(f"Time consumption: GENERATION: {dt_generation:.4f}, PHASING: {dt_phasing:.4f}, TRIGGER: {dt_triggering:.4f}") tstart = time.process_time() @@ -467,12 +478,12 @@ def generate_noise(self, phasing_mode="slice", trigger_mode="binned_sum", debug= logger.error(f"Requested phasing_mode {phasing_mode}. Only 'slice' and 'roll' are allowed") raise NotImplementedError(f"Requested phasing_mode {phasing_mode}. 
Only 'slice' and 'roll' are allowed") - # time profiling phasing + # time profiling phasing dt_phasing += time.process_time() - tstart tstart = time.process_time() if trigger_mode == "binned_sum": - is_triggered = self.__triggering() + is_triggered, triggered_bin = self.__triggering() elif trigger_mode == "stride": # more time consuming attempt to do triggering compared to taking binned sums is_triggered = self.__triggering_strided() @@ -484,7 +495,14 @@ def generate_noise(self, phasing_mode="slice", trigger_mode="binned_sum", debug= dt_triggering += time.process_time() - tstart if is_triggered: - return self._traces, self._phased_traces + triggered_bin = triggered_bin // 2 # the trace is cut in the downsampled version. Therefore, triggered bin is factor of two smaller. + i_low = triggered_bin - self.pre_trigger_bins + i_high = i_low + self.n_samples_trigger + if (i_low >= 0) and (i_high < self.n_samples): + # traces need to be downsampled + # resample and use axis -1 since trace might be either shape (N) for analytic trace or shape (3,N) for E-field + self._traces = scipy.signal.resample(self._traces, np.shape(self._traces)[-1] // self.upsampling, axis=-1) + return self._traces[:, i_low:i_high], self._phased_traces def generate_noise2(self, debug=False): """ @@ -499,7 +517,7 @@ def generate_noise2(self, debug=False): while True: counter += 1 if(counter % 1000 == 0): - print(f"{counter:d}, {max_amp:.2f}, threshold = {self.threshold:.2f}") + print(f"{counter:d}, {max_amp:.3g}, threshold = {self.threshold:.3g}") for iCh in range(self.n_channels): spec = self.noise.bandlimited_noise(self.min_freq, self.max_freq, self.n_samples * self.upsampling, self.sampling_rate * self.upsampling, From ec31b0360b60a0ee1dcb10ce0be6c254443b0c4a Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Thu, 20 Jan 2022 12:14:57 +0000 Subject: [PATCH 002/102] update changelog, change to Philox --- NuRadioReco/modules/channelGenericNoiseAdder.py | 8 ++++---- changelog.txt | 3 +++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/NuRadioReco/modules/channelGenericNoiseAdder.py b/NuRadioReco/modules/channelGenericNoiseAdder.py index e8a4acfd9..e5556c02a 100644 --- a/NuRadioReco/modules/channelGenericNoiseAdder.py +++ b/NuRadioReco/modules/channelGenericNoiseAdder.py @@ -2,7 +2,7 @@ from NuRadioReco.modules.base.module import register_run import numpy as np from NuRadioReco.utilities import units, fft -import numpy.random +from numpy.random import Generator, Philox import logging @@ -27,7 +27,7 @@ def add_random_phases(self, amps, n_samples_time_domain): """ amps = np.array(amps, dtype='complex') Np = (n_samples_time_domain - 1) // 2 - phases = self.__random_generator.rand(Np) * 2 * np.pi + phases = self.__random_generator.random(Np) * 2 * np.pi phases = np.cos(phases) + 1j * np.sin(phases) amps[1:Np + 1] *= phases # Note that the last entry of the index slice is f[Np] ! @@ -45,7 +45,7 @@ def fftnoise_fullfft(self, f): """ f = np.array(f, dtype='complex') Np = (len(f) - 1) // 2 - phases = self.__random_generator.rand(Np) * 2 * np.pi + phases = self.__random_generator.random(Np) * 2 * np.pi phases = np.cos(phases) + 1j * np.sin(phases) f[1:Np + 1] *= phases # Note that the last entry of the index slice is f[Np] ! 
f[-1:-1 - Np:-1] = np.conj(f[1:Np + 1]) @@ -308,7 +308,7 @@ def __init__(self): def begin(self, debug=False, seed=None): self.__debug = debug - self.__random_generator = np.random.RandomState(seed) + self.__random_generator = Generator(Philox(seed)) if debug: self.logger.setLevel(logging.DEBUG) diff --git a/changelog.txt b/changelog.txt index ccafe7d7e..3fbc11bed 100644 --- a/changelog.txt +++ b/changelog.txt @@ -2,6 +2,9 @@ Changelog - to keep track of all relevant changes please update the categories "new features" and "bugfixes" before a pull request merge! +version 2.1.6-dev +features: +- calculate trigger time and cut trace accordingly in the phased array noise generation utility class. version 2.1.5 bugfixes: From 7b36068212db20904ea4258f01789202e6fbb4c2 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Thu, 12 May 2022 12:24:48 +0000 Subject: [PATCH 003/102] show that failed tests are due to change in random generator --- NuRadioReco/modules/channelGenericNoiseAdder.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/channelGenericNoiseAdder.py b/NuRadioReco/modules/channelGenericNoiseAdder.py index e5556c02a..8f2f9bf57 100644 --- a/NuRadioReco/modules/channelGenericNoiseAdder.py +++ b/NuRadioReco/modules/channelGenericNoiseAdder.py @@ -3,6 +3,7 @@ import numpy as np from NuRadioReco.utilities import units, fft from numpy.random import Generator, Philox +import numpy.random import logging @@ -308,7 +309,8 @@ def __init__(self): def begin(self, debug=False, seed=None): self.__debug = debug - self.__random_generator = Generator(Philox(seed)) + self.__random_generator = np.random.RandomState(seed) + # self.__random_generator = Generator(Philox(seed)) if debug: self.logger.setLevel(logging.DEBUG) From 2c167acfb15ea3cf18d99937ec4510d82dd3ba62 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Thu, 12 May 2022 13:53:35 +0000 Subject: [PATCH 004/102] change noise generator back to Philox --- NuRadioMC/test/SingleEvents/validate.sh | 4 ++-- NuRadioReco/modules/channelGenericNoiseAdder.py | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/NuRadioMC/test/SingleEvents/validate.sh b/NuRadioMC/test/SingleEvents/validate.sh index 15b567380..920bf15af 100755 --- a/NuRadioMC/test/SingleEvents/validate.sh +++ b/NuRadioMC/test/SingleEvents/validate.sh @@ -1,5 +1,5 @@ #!/bin/bash -python T02RunSimulation.py 1e18_output_reference.hdf5 surface_station_1GHz.json config.yaml 1e18_output.hdf5 1e18_output.nur +python3 T02RunSimulation.py 1e18_output_reference.hdf5 surface_station_1GHz.json config.yaml 1e18_output.hdf5 1e18_output.nur -python T03validate.py 1e18_output.hdf5 1e18_output_reference.hdf5 +python3 T03validate.py 1e18_output.hdf5 1e18_output_reference.hdf5 diff --git a/NuRadioReco/modules/channelGenericNoiseAdder.py b/NuRadioReco/modules/channelGenericNoiseAdder.py index 8f2f9bf57..e5556c02a 100644 --- a/NuRadioReco/modules/channelGenericNoiseAdder.py +++ b/NuRadioReco/modules/channelGenericNoiseAdder.py @@ -3,7 +3,6 @@ import numpy as np from NuRadioReco.utilities import units, fft from numpy.random import Generator, Philox -import numpy.random import logging @@ -309,8 +308,7 @@ def __init__(self): def begin(self, debug=False, seed=None): self.__debug = debug - self.__random_generator = np.random.RandomState(seed) - # self.__random_generator = Generator(Philox(seed)) + self.__random_generator = Generator(Philox(seed)) if debug: self.logger.setLevel(logging.DEBUG) From 7efa7ce7ef5958aff3f65f9fb53cbef1b3aeeae0 Mon Sep 17 00:00:00 
2001 From: Christian Glaser Date: Thu, 26 Jan 2023 18:55:00 +0000 Subject: [PATCH 005/102] update version number --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f81a92bfd..f6b69b708 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "NuRadioMC" -version = "2.2.0-dev" +version = "2.1.7" authors = ["Christian Glaser et al."] homepage = "https://github.com/nu-radio/NuRadioMC" documentation = "https://nu-radio.github.io/NuRadioMC/main.html" From 393a08305843abb1214ec0b68e464d24f319a976 Mon Sep 17 00:00:00 2001 From: alisanozdrina Date: Tue, 21 Mar 2023 12:22:22 -0500 Subject: [PATCH 006/102] Update ara_detector_db.json Removed extra curly brace from line 2. Doesn't match proper json file formatting.. --- NuRadioReco/detector/ARA/ara_detector_db.json | 1 - 1 file changed, 1 deletion(-) diff --git a/NuRadioReco/detector/ARA/ara_detector_db.json b/NuRadioReco/detector/ARA/ara_detector_db.json index ec5fa08bb..b0f194bb9 100644 --- a/NuRadioReco/detector/ARA/ara_detector_db.json +++ b/NuRadioReco/detector/ARA/ara_detector_db.json @@ -1,4 +1,3 @@ -{ { "_default": {}, "channels": { From 2b5b9ee9fa2a8b4f96655021053b591d12809d6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 7 Mar 2023 14:46:29 +0100 Subject: [PATCH 007/102] Add RNO-G data reader (based on mattak) and, based on this reader, a noiseImporter module for RNO-G --- .../modules/io/rno_g/readRNOGDataMattak.py | 205 ++++++++++++++++++ .../measured_noise/noiseImporterRNOG.py | 142 ++++++++++++ 2 files changed, 347 insertions(+) create mode 100644 NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py create mode 100644 NuRadioReco/modules/measured_noise/noiseImporterRNOG.py diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py new file mode 100644 index 000000000..7dfc60644 --- /dev/null +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -0,0 +1,205 @@ +import numpy as np + +import logging +import os +import time +import datetime + +from NuRadioReco.modules.base.module import register_run + +import NuRadioReco.framework.event +import NuRadioReco.framework.station +import NuRadioReco.framework.channel +import NuRadioReco.framework.trigger + +from NuRadioReco.utilities import units + +import mattak.Dataset + + +def baseline_correction_128(wfs): + + # Get baseline in chunks of 128 bins + # np.split -> (16, n_events, n_channels, 128) + # np.mean -> (16, n_events, n_channels) + means = np.mean(np.split(wfs, 2048 // 128, axis=-1), axis=-1) + + # Get baseline traces + # np.repeat -> (2048, n_events, n_channels) + baseline_traces = np.repeat(means, 128 % 2048, axis=0) + + # np.moveaxis -> (n_events, n_channels, 2048) + baseline_traces = np.moveaxis(baseline_traces, 0, -1) + + return wfs - baseline_traces + + +class readRNOGData: + + def begin(self, + data_dirs, log_level=logging.INFO, selectors=None, + read_calibrated_data=False, + apply_baseline_correction=True, + convert_to_voltage=True, + select_triggers=None): + + """ + + Parameters + ---------- + + data_dirs: list of strings / string + Path to run directories (i.e. ".../stationXX/runXXX/") + + log_level: enum + Set verbosity level of logger + + selectors: list of lambdas + List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. 
+ Example: + + trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" + + read_calibrated_data: bool + If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. + (temp. Default: False) + + apply_baseline_correction: bool + Only applies when non-calibrated data are read. If true, correct for DC offset. + (Default: True) + + convert_to_voltage: bool + Only applies when non-calibrated data are read. If true, convert ADC to voltage. + (Default: True) + """ + + t0 = time.time() + + self.logger = logging.getLogger('NuRadioReco.readRNOGData') + self.logger.setLevel(log_level) + + self._read_calibrated_data = read_calibrated_data + self._apply_baseline_correction = apply_baseline_correction + self._convert_to_voltage = convert_to_voltage + + # Temporary solution hard-coded values from Cosmin. Only used when uncalibrated data + # is read and convert_to_voltage is True. + self._adc_ref_voltage_range = 2.5 * units.volt + self._adc_n_bits = 12 + + if not isinstance(data_dirs, (list, np.ndarray)): + data_dirs = [data_dirs] + + if selectors is not None: + if not isinstance(selectors, (list, np.ndarray)): + selectors = [selectors] + + if select_triggers is not None: + if isinstance(select_triggers, str): + selectors.append(lambda eventInfo: eventInfo.triggerType == select_triggers) + else: + for select_trigger in select_triggers: + selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) + + self._selectors = selectors + self.logger.info(f"Found {len(self._selectors)} selectors") + + self._time_begin = 0 + self._time_run = 0 + self.__counter = 0 + self.__skipped = 0 + + self._datasets = [] + self.__n_events_per_dataset = [] + + self.logger.info(f"Parse through {len(data_dirs)} directories.") + + for data_dir in data_dirs: + + if not os.path.exists(data_dir): + self.logger.error(f"The directory {data_dir} does not exist") + + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir) + self._datasets.append(dataset) + self.__n_events_per_dataset.append(dataset.N()) + + # keeps track which event index is in which dataset + self._event_idxs_datasets = np.cumsum(self.__n_events_per_dataset) + self._n_events_total = np.sum(self.__n_events_per_dataset) + + self._time_begin = time.time() - t0 + + + def get_n_events_of_prev_datasets(self, dataset_idx): + dataset_idx_prev = dataset_idx - 1 + return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 + + + @register_run() + def run(self): + + for event_idx in range(self._n_events_total): + self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") + t0 = time.time() + + # find correct dataset + dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) + dataset = self._datasets[dataset_idx] + + event_idx_in_dataset = event_idx - self.get_n_events_of_prev_datasets(dataset_idx) + dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event + + event_info = dataset.eventInfo() + + skip = False + if self._selectors is not None: + for selector in self._selectors: + if not selector(event_info): + skip = True + + if skip: + self.__skipped += 1 + continue + + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) + station = NuRadioReco.framework.station.Station(event_info.station) + station.set_station_time(datetime.datetime.fromtimestamp(event_info.readoutTime)) + + trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) + trigger.set_triggered() + 
trigger.set_trigger_time(event_info.triggerTime) + station.set_trigger(trigger) + + waveforms = dataset.wfs() + + for channel_id, wf in enumerate(waveforms): + channel = NuRadioReco.framework.channel.Channel(channel_id) + if self._read_calibrated_data: + channel.set_trace(wf * units.mV, event_info.sampleRate * units.GHz) + else: + # wf stores ADC counts + + if self._apply_baseline_correction: + # correct baseline + wf = baseline_correction_128(wf) + + if self._convert_to_voltage: + # convert adc to voltage + wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) + + channel.set_trace(wf, event_info.sampleRate * units.GHz) + + station.add_channel(channel) + + evt.set_station(station) + + self._time_run += time.time() - t0 + self.__counter += 1 + yield evt + + + def end(self): + self.logger.info(f"Read {self.__counter} events (skipped {self.__skipped} events)" + f"\n\tTime to initialize data sets : {self._time_begin:.2f}" + f"\n\tTime to initialize all events : {self._time_run:.2f}" + f"\n\tTime to per event : {self._time_run / self.__counter:.2f}") \ No newline at end of file diff --git a/NuRadioReco/modules/measured_noise/noiseImporterRNOG.py b/NuRadioReco/modules/measured_noise/noiseImporterRNOG.py new file mode 100644 index 000000000..ff55705e2 --- /dev/null +++ b/NuRadioReco/modules/measured_noise/noiseImporterRNOG.py @@ -0,0 +1,142 @@ +import numpy as np +import glob +import os +import sys + +from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData +from NuRadioReco.modules.base.module import register_run +from NuRadioReco.utilities import units + +from NuRadioReco.modules import channelResampler + +import logging + + +class noiseImporterRNOG: + """ + Imports recorded traces from RNOG stations. + + """ + + + def begin(self, noise_folder, station_ids=None, + channel_mapping=None, log_level=logging.INFO, + convert_noise_to_voltage=True, + match_station_ids=False): + """ + + Parameters + ---------- + noise_folder: string + the folder containing the noise file or subfolders containing noise files + + station_ids: list(int) + the station ids from which to add noise () + + channel_mapping: dict or None + option relevant for MC studies of new station designs where we do not + have forced triggers for. 
The channel_mapping dictionary maps the channel + ids of the MC station to the channel ids of the noise data + Default is None which is 1-to-1 mapping + + log_level: loggging log level + the log level, default logging.INFO + + """ + + self.__channel_mapping = channel_mapping + self.__station_ids = station_ids + self._convert_noise_to_voltage = convert_noise_to_voltage + self._match_station_ids = match_station_ids + + self._channel_respampler = channelResampler.channelResampler() + + self.logger = logging.getLogger('noiseImporter') + + self.logger.setLevel(log_level) + self.__channel_mapping = channel_mapping + + noise_files = glob.glob(f"{noise_folder}/**/*root", recursive=True) + self.__noise_folders = np.unique([os.path.dirname(e) for e in noise_files]) + + self.logger.info(f"Found {len(self.__noise_folders)} folders in {noise_folder}") + if not len(self.__noise_folders): + raise ValueError + + noise_reader = readRNOGData() + selectors = [lambda einfo: einfo.triggerType == "FORCE"] + noise_reader.begin(self.__noise_folders, selectors=selectors) + self._noise_events = [evt for evt in noise_reader.run()] + noise_reader.end() + + + def _buffer_station_id_list(self): + if self.__station_id_list is None: + self.__station_id_list = np.squeeze([evt.get_station_ids() for evt in self._noise_events]) + + return self.__station_id_list + + + def __get_noise_channel(self, channel_id): + if self.__channel_mapping is None: + return channel_id + else: + return self.__channel_mapping[channel_id] + + + @register_run() + def run(self, evt, station, det): + + if self._match_station_ids: + + station_ids = self._buffer_station_id_list() + mask = station_ids == station.get_id() + if not np.any(mask): + raise ValueError(f"No station with id {station.get_id()} in noise data.") + + i_noise = np.random.choice(np.arange(len(mask))[mask]) + + else: + i_noise = np.random.randint(0, len(self._noise_events)) + + noise_event = self._noise_events[i_noise] + + station_id = noise_event.get_station_ids()[0] + noise_station = noise_event.get_station(station_id) + + if self.__station_ids is not None and not station_id in self.__station_ids: + raise KeyError() + + self.logger.debug("Selected noise event {} ({}, run {}, event {})".format( + i_noise, noise_station.get_station_time(), noise_event.get_run_number(), + noise_event.get_id())) + + for channel in station.iter_channels(): + channel_id = channel.get_id() + + trace = channel.get_trace() + noise_channel = noise_station.get_channel(self.__get_noise_channel(channel_id)) + noise_trace = noise_channel.get_trace() + + if len(trace) > 2048: + self.logger.warn("Simulated trace is longer than 2048 bins... 
trim with :2048") + trace = trace[:2048] + + # sanity checks + if len(trace) != len(noise_trace): + erg_msg = f"Mismatch in trace lenght: Noise has {len(noise_trace)} " + \ + "and simulation has {len(trace)} samples" + self.logger.error(erg_msg) + raise ValueError(erg_msg) + + if channel.get_sampling_rate() != noise_channel.get_sampling_rate(): + erg_msg = "Mismatch in sampling rate: Noise has {} and simulation has {} GHz".format( + noise_channel.get_sampling_rate() / units.GHz, channel.get_sampling_rate() / units.GHz) + self.logger.error(erg_msg) + raise ValueError(erg_msg) + + trace = trace + noise_trace + channel.set_trace(trace, channel.get_sampling_rate()) + + def end(self): + pass From 38a5eec5ebc0224d53f707d4064afd25d4794613 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 27 Mar 2023 12:11:43 +0200 Subject: [PATCH 008/102] Add substructur for RNO-G noise importer --- NuRadioReco/modules/measured_noise/RNO-G/__init__.py | 0 .../{noiseImporterRNOG.py => RNO-G/noiseImporter.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 NuRadioReco/modules/measured_noise/RNO-G/__init__.py rename NuRadioReco/modules/measured_noise/{noiseImporterRNOG.py => RNO-G/noiseImporter.py} (100%) diff --git a/NuRadioReco/modules/measured_noise/RNO-G/__init__.py b/NuRadioReco/modules/measured_noise/RNO-G/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/NuRadioReco/modules/measured_noise/noiseImporterRNOG.py b/NuRadioReco/modules/measured_noise/RNO-G/noiseImporter.py similarity index 100% rename from NuRadioReco/modules/measured_noise/noiseImporterRNOG.py rename to NuRadioReco/modules/measured_noise/RNO-G/noiseImporter.py From 1451221ae3e05e336d8a8967a5590a89ba9bb375 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 28 Mar 2023 12:09:16 +0200 Subject: [PATCH 009/102] Add interface to RunTable to select runs --- .../modules/io/rno_g/readRNOGDataMattak.py | 95 +++++++++++++++---- 1 file changed, 79 insertions(+), 16 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 7dfc60644..0a6aade6a 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -14,24 +14,42 @@ from NuRadioReco.utilities import units +try: + from rnog_data.runtable import RunTable + import pandas + imported_runtable = True +except ImportError: + print("Import of run table failed. You will not be able to select runs! 
\n" + "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") + imported_runtable = False + + + import mattak.Dataset -def baseline_correction_128(wfs): +def baseline_correction(wfs, n_bins=128): - # Get baseline in chunks of 128 bins - # np.split -> (16, n_events, n_channels, 128) - # np.mean -> (16, n_events, n_channels) - means = np.mean(np.split(wfs, 2048 // 128, axis=-1), axis=-1) + # Get baseline in chunks of 128 bins + # np.split -> (16, n_events, n_channels, 128) + # np.mean -> (16, n_events, n_channels) + if n_bins is not None: + medians = np.media(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) - # Get baseline traces - # np.repeat -> (2048, n_events, n_channels) - baseline_traces = np.repeat(means, 128 % 2048, axis=0) + # Get baseline traces + # np.repeat -> (2048, n_events, n_channels) + baseline_traces = np.repeat(medians, n_bins % 2048, axis=0) + else: + medians = np.media(wfs, axis=-1) - # np.moveaxis -> (n_events, n_channels, 2048) - baseline_traces = np.moveaxis(baseline_traces, 0, -1) + # Get baseline traces + # np.repeat -> (2048, n_events, n_channels) + baseline_traces = np.repeat(medians, 2048, axis=0) + + # np.moveaxis -> (n_events, n_channels, 2048) + baseline_traces = np.moveaxis(baseline_traces, 0, -1) - return wfs - baseline_traces + return wfs - baseline_traces class readRNOGData: @@ -41,7 +59,9 @@ def begin(self, read_calibrated_data=False, apply_baseline_correction=True, convert_to_voltage=True, - select_triggers=None): + select_triggers=None, + run_types=["physics"], + max_trigger_rate=1 * units.Hz): """ @@ -70,7 +90,14 @@ def begin(self, convert_to_voltage: bool Only applies when non-calibrated data are read. If true, convert ADC to voltage. - (Default: True) + (Default: True) + + run_types: list + Used to select/reject runs from information in the RNO-G RunTable. List of run_types to be used. (Default: ['physics']) + + max_trigger_rate: float + Used to select/reject runs from information in the RNO-G RunTable. Maximum allowed trigger rate (per run) in Hz. + If 0, no cut is applied. (Default: 1 Hz) """ t0 = time.time() @@ -86,6 +113,13 @@ def begin(self, # is read and convert_to_voltage is True. self._adc_ref_voltage_range = 2.5 * units.volt self._adc_n_bits = 12 + + self.__max_trigger_rate = max_trigger_rate + self.__run_types = run_types + + if imported_runtable: + self.logger.debug("Access RunTable database ...") + self.__run_table = RunTable().get_table() if not isinstance(data_dirs, (list, np.ndarray)): data_dirs = [data_dirs] @@ -120,6 +154,11 @@ def begin(self, self.logger.error(f"The directory {data_dir} does not exist") dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir) + + # filter runs/datasets based on + if not self.__select_run(dataset): + continue + self._datasets.append(dataset) self.__n_events_per_dataset.append(dataset.N()) @@ -128,9 +167,33 @@ def begin(self, self._n_events_total = np.sum(self.__n_events_per_dataset) self._time_begin = time.time() - t0 + + + def __select_run(self, dataset): + """ Filter/select runs/datasets. 
Return True to select an dataset, return False to skip it """ + if not imported_runtable: + return True + + # get first eventInfo + dataset.setEntries(0) + event_info = dataset.eventInfo() + + run_id = event_info.run + station_id = event_info.station + + run_info = self.__run_table.query(f"station == {station_id:d} & run == {run_id:d}") + + if not run_info["run_type"].values[0] in self.__run_types: + return False + + if self.__max_trigger_rate and run_info["trigger_rate"].values[0] * units.Hz > self.__max_trigger_rate: + return False + + return True - def get_n_events_of_prev_datasets(self, dataset_idx): + def __get_n_events_of_prev_datasets(self, dataset_idx): + """ Get number of events from previous dataset to correctly set pointer """ dataset_idx_prev = dataset_idx - 1 return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 @@ -146,7 +209,7 @@ def run(self): dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) dataset = self._datasets[dataset_idx] - event_idx_in_dataset = event_idx - self.get_n_events_of_prev_datasets(dataset_idx) + event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event event_info = dataset.eventInfo() @@ -181,7 +244,7 @@ def run(self): if self._apply_baseline_correction: # correct baseline - wf = baseline_correction_128(wf) + wf = baseline_correction(wf) if self._convert_to_voltage: # convert adc to voltage From 88f97f98b13dae4ede79f1e5adfa00dff194d08c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 28 Mar 2023 12:10:36 +0200 Subject: [PATCH 010/102] Add counter for skipped runs --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 0a6aade6a..65a03b81c 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -148,6 +148,8 @@ def begin(self, self.logger.info(f"Parse through {len(data_dirs)} directories.") + self.__skipped_runs = 0 + for data_dir in data_dirs: if not os.path.exists(data_dir): @@ -157,6 +159,7 @@ def begin(self, # filter runs/datasets based on if not self.__select_run(dataset): + self.__skipped_runs += 1 continue self._datasets.append(dataset) @@ -265,4 +268,5 @@ def end(self): self.logger.info(f"Read {self.__counter} events (skipped {self.__skipped} events)" f"\n\tTime to initialize data sets : {self._time_begin:.2f}" f"\n\tTime to initialize all events : {self._time_run:.2f}" - f"\n\tTime to per event : {self._time_run / self.__counter:.2f}") \ No newline at end of file + f"\n\tTime to per event : {self._time_run / self.__counter:.2f}" + f"\n\tSkipped {self.__skipped_runs} runs.") \ No newline at end of file From 1ba190860578ee76a0035084196adfa9425ee94d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 28 Mar 2023 12:32:55 +0200 Subject: [PATCH 011/102] Small fix, more infos --- .../modules/io/rno_g/readRNOGDataMattak.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 65a03b81c..682b1ae19 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -34,13 +34,13 @@ def baseline_correction(wfs, 
n_bins=128): # np.split -> (16, n_events, n_channels, 128) # np.mean -> (16, n_events, n_channels) if n_bins is not None: - medians = np.media(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) + medians = np.median(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) # Get baseline traces # np.repeat -> (2048, n_events, n_channels) baseline_traces = np.repeat(medians, n_bins % 2048, axis=0) else: - medians = np.media(wfs, axis=-1) + medians = np.median(wfs, axis=-1) # Get baseline traces # np.repeat -> (2048, n_events, n_channels) @@ -149,6 +149,7 @@ def begin(self, self.logger.info(f"Parse through {len(data_dirs)} directories.") self.__skipped_runs = 0 + self.__n_runs = 0 for data_dir in data_dirs: @@ -162,6 +163,7 @@ def begin(self, self.__skipped_runs += 1 continue + self.__n_runs += 1 self._datasets.append(dataset) self.__n_events_per_dataset.append(dataset.N()) @@ -265,8 +267,9 @@ def run(self): def end(self): - self.logger.info(f"Read {self.__counter} events (skipped {self.__skipped} events)" - f"\n\tTime to initialize data sets : {self._time_begin:.2f}" - f"\n\tTime to initialize all events : {self._time_run:.2f}" - f"\n\tTime to per event : {self._time_run / self.__counter:.2f}" - f"\n\tSkipped {self.__skipped_runs} runs.") \ No newline at end of file + self.logger.info( + f"\n\tRead {self.__counter} events (skipped {self.__skipped} events)" + f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" + f"\n\tTime to initialize all events : {self._time_run:.2f}s" + f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" + f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") \ No newline at end of file From 0f2b4297ac80d1517341c4285eaf89f7c43e1294 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 28 Mar 2023 12:33:24 +0200 Subject: [PATCH 012/102] Renamed RNO-G noise directory --- NuRadioReco/modules/measured_noise/{RNO-G => RNO_G}/__init__.py | 0 .../modules/measured_noise/{RNO-G => RNO_G}/noiseImporter.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename NuRadioReco/modules/measured_noise/{RNO-G => RNO_G}/__init__.py (100%) rename NuRadioReco/modules/measured_noise/{RNO-G => RNO_G}/noiseImporter.py (99%) diff --git a/NuRadioReco/modules/measured_noise/RNO-G/__init__.py b/NuRadioReco/modules/measured_noise/RNO_G/__init__.py similarity index 100% rename from NuRadioReco/modules/measured_noise/RNO-G/__init__.py rename to NuRadioReco/modules/measured_noise/RNO_G/__init__.py diff --git a/NuRadioReco/modules/measured_noise/RNO-G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py similarity index 99% rename from NuRadioReco/modules/measured_noise/RNO-G/noiseImporter.py rename to NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index ff55705e2..ff8589d76 100644 --- a/NuRadioReco/modules/measured_noise/RNO-G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -12,7 +12,7 @@ import logging -class noiseImporterRNOG: +class noiseImporter: """ Imports recorded traces from RNOG stations. 
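After the rename, the module plugs into a NuRadioReco processing chain like any other module. A minimal usage sketch, assuming a directory of RNO-G forced-trigger runs and an existing evt/station/det from a simulation chain; the path and keyword values below are illustrative placeholders and not part of these patches:

from NuRadioReco.modules.measured_noise.RNO_G import noiseImporter

noise_adder = noiseImporter.noiseImporter()
# Point the importer at a directory tree containing RNO-G root files (placeholder path).
# begin() internally uses readRNOGDataMattak to load forced-trigger events as noise.
noise_adder.begin("/path/to/rnog/forced_trigger_runs",
                  station_ids=None,       # accept noise from any station
                  channel_mapping=None)   # 1-to-1 mapping between MC and noise channels

# Inside the event loop of a simulation/reconstruction chain:
# noise_adder.run(evt, station, det)      # adds a measured noise trace to each simulated channel
noise_adder.end()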
From ef19896577216253d8a09e203207d536c9e788cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 30 Mar 2023 11:52:59 +0200 Subject: [PATCH 013/102] Add more logger.info --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 682b1ae19..536ebf8d4 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -13,6 +13,7 @@ import NuRadioReco.framework.trigger from NuRadioReco.utilities import units +import mattak.Dataset try: from rnog_data.runtable import RunTable @@ -24,10 +25,6 @@ imported_runtable = False - -import mattak.Dataset - - def baseline_correction(wfs, n_bins=128): # Get baseline in chunks of 128 bins @@ -114,6 +111,9 @@ def begin(self, self._adc_ref_voltage_range = 2.5 * units.volt self._adc_n_bits = 12 + self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + + f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz") + self.__max_trigger_rate = max_trigger_rate self.__run_types = run_types From dd0893ad215885c022e74cc60d62b5f4b436a4d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 30 Mar 2023 12:14:21 +0200 Subject: [PATCH 014/102] Some cleanup, adding more logger info --- .../measured_noise/RNO_G/noiseImporter.py | 50 +++++++++++-------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index ff8589d76..0e48c69d3 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -1,14 +1,12 @@ import numpy as np import glob import os -import sys +import random from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData from NuRadioReco.modules.base.module import register_run from NuRadioReco.utilities import units -from NuRadioReco.modules import channelResampler - import logging @@ -19,49 +17,59 @@ class noiseImporter: """ - def begin(self, noise_folder, station_ids=None, - channel_mapping=None, log_level=logging.INFO, - convert_noise_to_voltage=True, - match_station_ids=False): + def begin(self, noise_folder, + match_station_id=False, station_ids=None, + channel_mapping=None, scramble_noise_file_order=True, + log_level=logging.INFO): """ Parameters ---------- noise_folder: string - the folder containing the noise file or subfolders containing noise files + Folder containing noise file(s). Search in any subfolder as well. + + match_station_id: bool + If True, add only noise from stations with the same id. (Default: False) station_ids: list(int) - the station ids from which to add noise () + Only add noise from those station ids. If None, use any station. (Default: None) channel_mapping: dict or None option relevant for MC studies of new station designs where we do not have forced triggers for. The channel_mapping dictionary maps the channel ids of the MC station to the channel ids of the noise data Default is None which is 1-to-1 mapping + + scramble_noise_file_order: bool + If True, randomize the order of noise files before reading them. 
(Default: True) log_level: loggging log level the log level, default logging.INFO """ - self.__channel_mapping = channel_mapping - self.__station_ids = station_ids - self._convert_noise_to_voltage = convert_noise_to_voltage - self._match_station_ids = match_station_ids - - self._channel_respampler = channelResampler.channelResampler() - self.logger = logging.getLogger('noiseImporter') - self.logger.setLevel(log_level) + + self._match_station_id = match_station_id + self.__station_ids = station_ids + self.__channel_mapping = channel_mapping + self.logger.info(f"\n\tMatch station id: {match_station_id}" + f"\n\tUse noise from only those stations: {station_ids}" + f"\n\tUse the following channel mapping: {channel_mapping}" + f"\n\tRandomize sequence of noise files: {scramble_noise_file_order}") + noise_files = glob.glob(f"{noise_folder}/**/*root", recursive=True) - self.__noise_folders = np.unique([os.path.dirname(e) for e in noise_files]) + self.__noise_folders = np.unique([os.path.dirname(e) for e in noise_files]) self.logger.info(f"Found {len(self.__noise_folders)} folders in {noise_folder}") if not len(self.__noise_folders): raise ValueError + + if scramble_noise_file_order: + random.shuffle(self.__noise_folders) noise_reader = readRNOGData() selectors = [lambda einfo: einfo.triggerType == "FORCE"] @@ -87,7 +95,7 @@ def __get_noise_channel(self, channel_id): @register_run() def run(self, evt, station, det): - if self._match_station_ids: + if self._match_station_id: station_ids = self._buffer_station_id_list() mask = station_ids == station.get_id() @@ -105,7 +113,7 @@ def run(self, evt, station, det): noise_station = noise_event.get_station(station_id) if self.__station_ids is not None and not station_id in self.__station_ids: - raise KeyError() + raise ValueError(f"Station id {station_id} not in list of allowed ids: {self.__station_ids}") self.logger.debug("Selected noise event {} ({}, run {}, event {})".format( i_noise, noise_station.get_station_time(), noise_event.get_run_number(), @@ -117,7 +125,7 @@ def run(self, evt, station, det): trace = channel.get_trace() noise_channel = noise_station.get_channel(self.__get_noise_channel(channel_id)) noise_trace = noise_channel.get_trace() - + if len(trace) > 2048: self.logger.warn("Simulated trace is longer than 2048 bins... trim with :2048") trace = trace[:2048] From d9ada9a523083787e6cbd588305156ace936c35e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 30 Mar 2023 16:29:36 +0200 Subject: [PATCH 015/102] Make run selection optional, add time_offset between trigger/event time and trace_start_time --- .../modules/io/rno_g/readRNOGDataMattak.py | 47 ++++++++++++++++++- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 536ebf8d4..f14b68162 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -49,6 +49,42 @@ def baseline_correction(wfs, n_bins=128): return wfs - baseline_traces +def get_time_offset(trigger_type): + """ + Mapping the offset between trace start time and trigger time (~ signal time). + Temporary use hard-coded values for each trigger type. 
In the future this + information might be time, station, and channel dependent and should come + from a database (or is already calibrated in mattak) + + Parameters + ---------- + + trigger_type: str + Trigger type encoded as string from Mattak + + Returns + ------- + + time_offset: float + trace_start_time = trigger_time - time_offset + + """ + + time_offsets = { + "FORCE": 0, + "LT": 213 * units.ns, # ~ 1 / 3 of trace @ 2048 sample with 3.2 GSa/s + "RADIANT": 320 * units.ns # ~ 1 / 2 of trace @ 2048 sample with 3.2 GSa/s + } + + if trigger_type.startswith("RADIANT"): + trigger_type = "RADIANT" + + if trigger_type in time_offsets: + return time_offsets[trigger_type] + else: + raise KeyError(f"Unknown trigger type: {trigger_type}. Known are: FORCE, LT, RADIANT. Abort ....") + + class readRNOGData: def begin(self, @@ -57,6 +93,7 @@ def begin(self, apply_baseline_correction=True, convert_to_voltage=True, select_triggers=None, + select_runs=True, run_types=["physics"], max_trigger_rate=1 * units.Hz): @@ -89,6 +126,9 @@ def begin(self, Only applies when non-calibrated data are read. If true, convert ADC to voltage. (Default: True) + select_runs: bool + Select runs + run_types: list Used to select/reject runs from information in the RNO-G RunTable. List of run_types to be used. (Default: ['physics']) @@ -159,7 +199,7 @@ def begin(self, dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir) # filter runs/datasets based on - if not self.__select_run(dataset): + if select_runs and not self.__select_run(dataset): self.__skipped_runs += 1 continue @@ -231,11 +271,11 @@ def run(self): evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) - station.set_station_time(datetime.datetime.fromtimestamp(event_info.readoutTime)) trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) trigger.set_triggered() trigger.set_trigger_time(event_info.triggerTime) + station.set_station_time(datetime.datetime.fromtimestamp(event_info.triggerTime)) station.set_trigger(trigger) waveforms = dataset.wfs() @@ -256,6 +296,9 @@ def run(self): wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) channel.set_trace(wf, event_info.sampleRate * units.GHz) + + time_offset = get_time_offset(event_info.triggerType) + channel.set_trace_start_time(-time_offset) # relative to event/trigger time station.add_channel(channel) From b1b435fd212e10bb1c85392b3a5c149d509d151c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 30 Mar 2023 18:26:58 +0200 Subject: [PATCH 016/102] Use astropy.time instead of datetime to convert and set the station time --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index f14b68162..446eef930 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -3,7 +3,7 @@ import logging import os import time -import datetime +import astropy.time from NuRadioReco.modules.base.module import register_run @@ -271,11 +271,11 @@ def run(self): evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) + station.set_station_time(astropy.time.Time(event_info.triggerTime, format='unix')) trigger = 
NuRadioReco.framework.trigger.Trigger(event_info.triggerType) trigger.set_triggered() trigger.set_trigger_time(event_info.triggerTime) - station.set_station_time(datetime.datetime.fromtimestamp(event_info.triggerTime)) station.set_trigger(trigger) waveforms = dataset.wfs() From b1e10f75ecaf6427083294610262a8a910f147a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 31 Mar 2023 13:05:27 +0200 Subject: [PATCH 017/102] Add logger info and sanity check --- .../modules/io/rno_g/readRNOGDataMattak.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 446eef930..44875f0bc 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -151,8 +151,9 @@ def begin(self, self._adc_ref_voltage_range = 2.5 * units.volt self._adc_n_bits = 12 - self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + - f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz") + if select_runs: + self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + + f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz") self.__max_trigger_rate = max_trigger_rate self.__run_types = run_types @@ -210,9 +211,13 @@ def begin(self, # keeps track which event index is in which dataset self._event_idxs_datasets = np.cumsum(self.__n_events_per_dataset) self._n_events_total = np.sum(self.__n_events_per_dataset) - self._time_begin = time.time() - t0 + if not self._n_events_total: + err = "No runs have been selected. Abort ..." + self.logger.error(err) + raise ValueError(err) + def __select_run(self, dataset): """ Filter/select runs/datasets. 
Return True to select an dataset, return False to skip it """ @@ -227,11 +232,15 @@ def __select_run(self, dataset): station_id = event_info.station run_info = self.__run_table.query(f"station == {station_id:d} & run == {run_id:d}") + run_type = run_info["run_type"].values[0] - if not run_info["run_type"].values[0] in self.__run_types: + if not run_type in self.__run_types: + self.logger.info(f"Reject station {station_id} run {run_id} because of run type {run_type}") return False - if self.__max_trigger_rate and run_info["trigger_rate"].values[0] * units.Hz > self.__max_trigger_rate: + trigger_rate = run_info["trigger_rate"].values[0] * units.Hz + if self.__max_trigger_rate and trigger_rate > self.__max_trigger_rate: + self.logger.info(f"Reject station {station_id} run {run_id} because trigger rate is to high ({trigger_rate / units.Hz} Hz)") return False return True From 6f6134c92f82850f8337b9b5880f6156cddfcaa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 31 Mar 2023 13:05:45 +0200 Subject: [PATCH 018/102] small fix --- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 0e48c69d3..2a9e6ed1a 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -73,10 +73,12 @@ def begin(self, noise_folder, noise_reader = readRNOGData() selectors = [lambda einfo: einfo.triggerType == "FORCE"] - noise_reader.begin(self.__noise_folders, selectors=selectors) + noise_reader.begin(self.__noise_folders, selectors=selectors, log_level=log_level) self._noise_events = [evt for evt in noise_reader.run()] noise_reader.end() + self.__station_id_list = None + def _buffer_station_id_list(self): if self.__station_id_list is None: From 28f4ae7746cc27ee718ef8ec3dac247e71f8d828 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 3 Apr 2023 14:39:11 +0200 Subject: [PATCH 019/102] Add example script to read rno-g data with the new Mattak based reader. --- .../RNO_data/read_data_example/read_rnog.py | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py diff --git a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py new file mode 100644 index 000000000..a88ac77b9 --- /dev/null +++ b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py @@ -0,0 +1,65 @@ +from NuRadioReco.modules.io.rno_g import readRNOGDataMattak +from NuRadioReco.modules.io import eventWriter +from NuRadioReco.utilities import units + +import sys +import logging + +""" read in data """ +list_of_root_files = sys.argv[1:-1] +output_filename = sys.argv[-1] + +rnog_reader = readRNOGDataMattak.readRNOGData() +writer = eventWriter.eventWriter() + +""" +With a selector you can select or reject events based on information in the +Mattak class EventInfo. 
See https://github.com/RNO-G/mattak/blob/main/py/mattak/Dataset.py + +class EventInfo: + eventNumber: int + station : int + run: int + readoutTime : float + triggerTime : float + triggerType: str + sysclk: int + sysclkLastPPS: Tuple[int, int] # the last 2 PPS sysclks, most recent first + pps: int + radiantStartWindows: numpy.ndarray + sampleRate: float # Sample rate, in GSa/s +""" + +# The following selector selects only events with a forced trigger. +selectors = [lambda einfo: einfo.triggerType == "FORCE"] + +rnog_reader.begin( + list_of_root_files, + selectors=selectors, + log_level=logging.INFO, + # Currently false because Mattak does not contain calibrated data yet + read_calibrated_data=False, + # Only used when read_calibrated_data==False, performs a simple baseline subtraction each 128 bins + apply_baseline_correction=True, + # Only used when read_calibrated_data==False, performs a linear voltage calibration with hardcoded values + convert_to_voltage=True, + # Can be used instead of defining a selector (only for triggers) + select_triggers=None, + # If true, and if the RunTable database is available select runs based on the following criteria + select_runs=True, + # Only use runs of a certain run type + run_types=["physics"], + # Only use runs with a maximum trigger rate of 1 Hz + max_trigger_rate=1 * units.Hz) + +writer.begin(filename=output_filename) + +for i_event, event in enumerate(rnog_reader.run()): + + writer.run(event) + +print(i_event) +rnog_reader.end() +writer.end() + + From db82af0645b9af93c0892070f3e92a7e2831dd62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 3 Apr 2023 14:40:24 +0200 Subject: [PATCH 020/102] Access to DB can fail (for example without internet connection). Catch that --- .../modules/io/rno_g/readRNOGDataMattak.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 44875f0bc..d115b2d86 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -158,9 +158,15 @@ def begin(self, self.__max_trigger_rate = max_trigger_rate self.__run_types = run_types + global imported_runtable if imported_runtable: self.logger.debug("Access RunTable database ...") - self.__run_table = RunTable().get_table() + try: + self.__run_table = RunTable().get_table() + except: + self.logger.error("No connect to RunTable database could be established. " + "Runs will not be filtered.") + imported_runtable = False if not isinstance(data_dirs, (list, np.ndarray)): data_dirs = [data_dirs] @@ -200,7 +206,7 @@ def begin(self, dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir) # filter runs/datasets based on - if select_runs and not self.__select_run(dataset): + if select_runs and imported_runtable and not self.__select_run(dataset): self.__skipped_runs += 1 continue @@ -221,9 +227,7 @@ def begin(self, def __select_run(self, dataset): """ Filter/select runs/datasets. 
Return True to select an dataset, return False to skip it """ - if not imported_runtable: - return True - + # get first eventInfo dataset.setEntries(0) event_info = dataset.eventInfo() @@ -287,6 +291,7 @@ def run(self): trigger.set_trigger_time(event_info.triggerTime) station.set_trigger(trigger) + # access data waveforms = dataset.wfs() for channel_id, wf in enumerate(waveforms): From 85cc1daffadffff10af306165e300f5e6a5d381f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 3 Apr 2023 14:41:58 +0200 Subject: [PATCH 021/102] Small refactoring of eventWriter.py --- NuRadioReco/modules/io/eventWriter.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/eventWriter.py b/NuRadioReco/modules/io/eventWriter.py index a924b646d..0204943bd 100644 --- a/NuRadioReco/modules/io/eventWriter.py +++ b/NuRadioReco/modules/io/eventWriter.py @@ -72,12 +72,14 @@ def begin(self, filename, max_file_size=1024, check_for_duplicates=False, events both set, the file will be split whenever any of the two conditions is fullfilled. """ logger.setLevel(log_level) - if filename[-4:] == '.nur': + if filename.endswith(".nur"): self.__filename = filename[:-4] else: self.__filename = filename - if filename[-4:] == '.ari': + + if filename.endswith('.ari'): logger.warning('The file ending .ari for NuRadioReco files is deprecated. Please use .nur instead.') + self.__check_for_duplicates = check_for_duplicates self.__number_of_events = 0 self.__current_file_size = 0 From a74fbc963704fd375cea2ef7d2b3597675cc788d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 4 Apr 2023 18:14:00 +0200 Subject: [PATCH 022/102] Implement run() as wrapper around __run(). This allows to specify the event index to get a specific event rather than only looping over all. 
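A short usage sketch of the interface added in this commit (the run directory is a placeholder; note that a later commit in this series replaces run(event_index=...) with a dedicated read_event() method):

    from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData

    reader = readRNOGData()
    reader.begin("/path/to/station23/run325")   # made-up run directory

    # loop over all selected events ...
    for evt in reader.run():
        pass

    # ... or pull out a single event by its incremental index
    evt_42 = next(reader.run(event_index=42))
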
Started to implement some function to get event header information --- .../modules/io/rno_g/readRNOGDataMattak.py | 159 +++++++++++------- 1 file changed, 102 insertions(+), 57 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index d115b2d86..10ded678a 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -254,73 +254,118 @@ def __get_n_events_of_prev_datasets(self, dataset_idx): """ Get number of events from previous dataset to correctly set pointer """ dataset_idx_prev = dataset_idx - 1 return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 + + + def __get_dataset_and_event_info(self, event_idx): + """ Set pointer to correct """ + # find correct dataset + dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) + dataset = self._datasets[dataset_idx] + event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) + dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event + + event_info = dataset.eventInfo() - @register_run() - def run(self): - + if self._selectors is not None: + for selector in self._selectors: + if not selector(event_info): + return None, None + + return dataset, event_info + + + def get_event_information_dict(self, keys=["station", "run"]): + + data = {} + for event_idx in range(self._n_events_total): - self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") - t0 = time.time() - - # find correct dataset - dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) - dataset = self._datasets[dataset_idx] - - event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) - dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event - event_info = dataset.eventInfo() - - skip = False - if self._selectors is not None: - for selector in self._selectors: - if not selector(event_info): - skip = True + _, event_info = self.__get_dataset_and_event_info(event_idx) - if skip: - self.__skipped += 1 + if event_info is None: continue - - evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) - station = NuRadioReco.framework.station.Station(event_info.station) - station.set_station_time(astropy.time.Time(event_info.triggerTime, format='unix')) + + data[event_idx] = {getattr(event_info, key) for key in keys} + + return data - trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) - trigger.set_triggered() - trigger.set_trigger_time(event_info.triggerTime) - station.set_trigger(trigger) + + def __run(self, event_idx): + """ Returns a single event with certain index """ + + self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") + t0 = time.time() - # access data - waveforms = dataset.wfs() - - for channel_id, wf in enumerate(waveforms): - channel = NuRadioReco.framework.channel.Channel(channel_id) - if self._read_calibrated_data: - channel.set_trace(wf * units.mV, event_info.sampleRate * units.GHz) - else: - # wf stores ADC counts - - if self._apply_baseline_correction: - # correct baseline - wf = baseline_correction(wf) - - if self._convert_to_voltage: - # convert adc to voltage - wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) - - channel.set_trace(wf, event_info.sampleRate * units.GHz) + dataset, event_info = self.__get_dataset_and_event_info(event_idx) + + if 
event_info is None: + self.__skipped += 1 + return None + + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) + station = NuRadioReco.framework.station.Station(event_info.station) + station.set_station_time(astropy.time.Time(event_info.triggerTime, format='unix')) + + trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) + trigger.set_triggered() + trigger.set_trigger_time(event_info.triggerTime) + station.set_trigger(trigger) + + # access data + waveforms = dataset.wfs() + + for channel_id, wf in enumerate(waveforms): + channel = NuRadioReco.framework.channel.Channel(channel_id) + if self._read_calibrated_data: + channel.set_trace(wf * units.mV, event_info.sampleRate * units.GHz) + else: + # wf stores ADC counts - time_offset = get_time_offset(event_info.triggerType) - channel.set_trace_start_time(-time_offset) # relative to event/trigger time - - station.add_channel(channel) - - evt.set_station(station) + if self._apply_baseline_correction: + # correct baseline + wf = baseline_correction(wf) + + if self._convert_to_voltage: + # convert adc to voltage + wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) + + channel.set_trace(wf, event_info.sampleRate * units.GHz) - self._time_run += time.time() - t0 - self.__counter += 1 - yield evt + time_offset = get_time_offset(event_info.triggerType) + channel.set_trace_start_time(-time_offset) # relative to event/trigger time + + station.add_channel(channel) + + evt.set_station(station) + + self._time_run += time.time() - t0 + self.__counter += 1 + yield evt + + + @register_run() + def run(self, event_index=None): + """ + Loop over all events or one specific event with event_index. + + Parameters + ---------- + + event_index: int + Incremental index. If None, loop over all events. (Default: None) + + Returns + ------- + + evt: NuRadioReco.framework.event + """ + + if event_index is None: + for event_idx in range(self._n_events_total): + yield from self.__run(event_idx) + else: + yield from self.__run(event_index) def end(self): From 2015fdbb2b5bb39a7f6f21e7451a6ddaacf75f7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 5 Apr 2023 14:43:00 +0200 Subject: [PATCH 023/102] Make interface of rnog noiseImporter more flexiable: Allow to provide list of noise folders --- .../measured_noise/RNO_G/noiseImporter.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 2a9e6ed1a..dd86d0914 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -17,7 +17,7 @@ class noiseImporter: """ - def begin(self, noise_folder, + def begin(self, noise_folders, file_pattern="*", match_station_id=False, station_ids=None, channel_mapping=None, scramble_noise_file_order=True, log_level=logging.INFO): @@ -25,8 +25,11 @@ def begin(self, noise_folder, Parameters ---------- - noise_folder: string - Folder containing noise file(s). Search in any subfolder as well. + noise_folders: str or list(str) + Folder(s) containing noise file(s). Search in any subfolder as well. + + file_patters: str + File patters used to search for directories, (Default: "*", other examples might be "combined") match_station_id: bool If True, add only noise from stations with the same id. 
(Default: False) @@ -61,10 +64,16 @@ def begin(self, noise_folder, f"\n\tUse the following channel mapping: {channel_mapping}" f"\n\tRandomize sequence of noise files: {scramble_noise_file_order}") - noise_files = glob.glob(f"{noise_folder}/**/*root", recursive=True) + if not isinstance(noise_folders, list): + noise_folders = [noise_folders] + + # find all subfolders + noise_files = [] + for noise_folder in noise_folders: + noise_files += glob.glob(f"{noise_folder}/**/{file_pattern}root", recursive=True) self.__noise_folders = np.unique([os.path.dirname(e) for e in noise_files]) - self.logger.info(f"Found {len(self.__noise_folders)} folders in {noise_folder}") + self.logger.info(f"Found {len(self.__noise_folders)}") if not len(self.__noise_folders): raise ValueError From 2714ada31d529742360c9765f39d3d339ebf7563 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 5 Apr 2023 15:37:10 +0200 Subject: [PATCH 024/102] Optimize reader --- .../modules/io/rno_g/readRNOGDataMattak.py | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 10ded678a..68f10aacd 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -183,7 +183,7 @@ def begin(self, selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) self._selectors = selectors - self.logger.info(f"Found {len(self._selectors)} selectors") + self.logger.info(f"Found {len(self._selectors)} selector(s)") self._time_begin = 0 self._time_run = 0 @@ -193,7 +193,7 @@ def begin(self, self._datasets = [] self.__n_events_per_dataset = [] - self.logger.info(f"Parse through {len(data_dirs)} directories.") + self.logger.info(f"Parse through {len(data_dirs)} directory/ies.") self.__skipped_runs = 0 self.__n_runs = 0 @@ -265,7 +265,7 @@ def __get_dataset_and_event_info(self, event_idx): event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event - event_info = dataset.eventInfo() + event_info = dataset.eventInfo() # returns a single eventInfo if self._selectors is not None: for selector in self._selectors: @@ -278,15 +278,22 @@ def __get_dataset_and_event_info(self, event_idx): def get_event_information_dict(self, keys=["station", "run"]): data = {} + n_prev = 0 + for dataset in self._datasets: + dataset.setEntries(0, dataset.N()) + + for idx, eventinfo in enumerate(dataset.eventInfo()): # returns a list - for event_idx in range(self._n_events_total): + event_idx = idx + n_prev # event index accross all datasets combined - _, event_info = self.__get_dataset_and_event_info(event_idx) + if self._selectors is not None: + for selector in self._selectors: + if not selector(eventinfo): + continue - if event_info is None: - continue + data[event_idx] = {getattr(eventinfo, key) for key in keys} - data[event_idx] = {getattr(event_info, key) for key in keys} + n_prev += dataset.N() return data From 2ab1b5fa5391331df6c5b39330627238ab77d29c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 5 Apr 2023 16:05:09 +0200 Subject: [PATCH 025/102] Allow to specift the mattak backend for both modules. 
Small fix in reader --- .../modules/io/rno_g/readRNOGDataMattak.py | 11 +++++--- .../measured_noise/RNO_G/noiseImporter.py | 27 +++++++++---------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 68f10aacd..d0c7c878c 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -95,7 +95,8 @@ def begin(self, select_triggers=None, select_runs=True, run_types=["physics"], - max_trigger_rate=1 * units.Hz): + max_trigger_rate=1 * units.Hz, + mattak_backend="auto"): """ @@ -135,6 +136,10 @@ def begin(self, max_trigger_rate: float Used to select/reject runs from information in the RNO-G RunTable. Maximum allowed trigger rate (per run) in Hz. If 0, no cut is applied. (Default: 1 Hz) + + mattak_backend: str + Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise + a "fallback" to uproot is used. (Default: "auto") """ t0 = time.time() @@ -203,7 +208,7 @@ def begin(self, if not os.path.exists(data_dir): self.logger.error(f"The directory {data_dir} does not exist") - dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir) + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend) # filter runs/datasets based on if select_runs and imported_runtable and not self.__select_run(dataset): @@ -280,7 +285,7 @@ def get_event_information_dict(self, keys=["station", "run"]): data = {} n_prev = 0 for dataset in self._datasets: - dataset.setEntries(0, dataset.N()) + dataset.setEntries((0, dataset.N())) for idx, eventinfo in enumerate(dataset.eventInfo()): # returns a list diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index dd86d0914..a52de7f9b 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -20,7 +20,7 @@ class noiseImporter: def begin(self, noise_folders, file_pattern="*", match_station_id=False, station_ids=None, channel_mapping=None, scramble_noise_file_order=True, - log_level=logging.INFO): + log_level=logging.INFO, mattak_backend="auto"): """ Parameters @@ -48,7 +48,10 @@ def begin(self, noise_folders, file_pattern="*", log_level: loggging log level the log level, default logging.INFO - + + mattak_backend: str + Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise + a "fallback" to uproot is used. 
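A hedged usage sketch of the noiseImporter interface at this point in the series (all paths are made up and the import path is inferred from the module location):

    from NuRadioReco.modules.measured_noise.RNO_G.noiseImporter import noiseImporter

    noise_adder = noiseImporter()
    noise_adder.begin(
        ["/data/noise/station23", "/data/noise/station24"],  # one or several noise folders
        file_pattern="combined*",      # pick up only e.g. combined.root files
        match_station_id=True,         # add only noise recorded by the simulated station
        mattak_backend="uproot")       # force the uproot backend of the underlying reader
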
(Default: "auto") """ self.logger = logging.getLogger('noiseImporter') @@ -80,20 +83,14 @@ def begin(self, noise_folders, file_pattern="*", if scramble_noise_file_order: random.shuffle(self.__noise_folders) - noise_reader = readRNOGData() + self._noise_reader = readRNOGData() selectors = [lambda einfo: einfo.triggerType == "FORCE"] - noise_reader.begin(self.__noise_folders, selectors=selectors, log_level=log_level) - self._noise_events = [evt for evt in noise_reader.run()] - noise_reader.end() - - self.__station_id_list = None - - - def _buffer_station_id_list(self): - if self.__station_id_list is None: - self.__station_id_list = np.squeeze([evt.get_station_ids() for evt in self._noise_events]) - - return self.__station_id_list + self._noise_reader.begin(self.__noise_folders, selectors=selectors, log_level=log_level, mattak_backend=mattak_backend) + import time + + t0 = time.time() + self._noise_events = [evt for evt in self._noise_reader.run()] + print(time.time() - t0) def __get_noise_channel(self, channel_id): From 07dc3b217bff0ea74848963f186d431137dfd767 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 5 Apr 2023 17:04:19 +0200 Subject: [PATCH 026/102] Finish implementing optimization for noiseImporter. Decoupled generator function (i.e., run()) and function to return a specific event (read_event). Some fixes --- .../modules/io/rno_g/readRNOGDataMattak.py | 78 ++++++++++--------- .../measured_noise/RNO_G/noiseImporter.py | 33 ++++---- 2 files changed, 59 insertions(+), 52 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index d0c7c878c..01ef01346 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -224,6 +224,9 @@ def begin(self, self._n_events_total = np.sum(self.__n_events_per_dataset) self._time_begin = time.time() - t0 + # Variable not yet implemented in mattak + # self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.") + if not self._n_events_total: err = "No runs have been selected. Abort ..." 
self.logger.error(err) @@ -261,7 +264,7 @@ def __get_n_events_of_prev_datasets(self, dataset_idx): return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 - def __get_dataset_and_event_info(self, event_idx): + def __get_dataset_for_event(self, event_idx): """ Set pointer to correct """ # find correct dataset dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) @@ -269,15 +272,8 @@ def __get_dataset_and_event_info(self, event_idx): event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event - - event_info = dataset.eventInfo() # returns a single eventInfo - - if self._selectors is not None: - for selector in self._selectors: - if not selector(event_info): - return None, None - - return dataset, event_info + + return dataset def get_event_information_dict(self, keys=["station", "run"]): @@ -287,19 +283,27 @@ def get_event_information_dict(self, keys=["station", "run"]): for dataset in self._datasets: dataset.setEntries((0, dataset.N())) - for idx, eventinfo in enumerate(dataset.eventInfo()): # returns a list + for idx, evtinfo in enumerate(dataset.eventInfo()): # returns a list event_idx = idx + n_prev # event index accross all datasets combined + skip = False if self._selectors is not None: - for selector in self._selectors: - if not selector(eventinfo): - continue + for selector in self._selectors: + if not selector(evtinfo): + self.logger.debug(f"Event {event_idx} (station {evtinfo.station}, run {evtinfo.run}, " + f"event number {evtinfo.eventNumber}) is skipped.") + skip = True + break + + if skip: + self.__skipped += 1 + continue - data[event_idx] = {getattr(eventinfo, key) for key in keys} + data[event_idx] = {key: getattr(evtinfo, key) for key in keys} n_prev += dataset.N() - + return data @@ -309,12 +313,17 @@ def __run(self, event_idx): self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") t0 = time.time() - dataset, event_info = self.__get_dataset_and_event_info(event_idx) - - if event_info is None: - self.__skipped += 1 - return None - + dataset = self.__get_dataset_for_event(event_idx) + event_info = dataset.eventInfo() # returns a single eventInfo + + if self._selectors is not None: + for selector in self._selectors: + if not selector(event_info): + self.logger.debug(f"Event {event_idx} (station {event_info.station}, run {event_info.run}, " + f"event number {event_info.eventNumber}) is skipped.") + self.__skipped += 1 + return None + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) station.set_station_time(astropy.time.Time(event_info.triggerTime, format='unix')) @@ -353,31 +362,26 @@ def __run(self, event_idx): self._time_run += time.time() - t0 self.__counter += 1 - yield evt + return evt @register_run() - def run(self, event_index=None): + def run(self): """ - Loop over all events or one specific event with event_index. - - Parameters - ---------- - - event_index: int - Incremental index. If None, loop over all events. (Default: None) + Loop over all events. 
Returns ------- evt: NuRadioReco.framework.event """ - - if event_index is None: - for event_idx in range(self._n_events_total): - yield from self.__run(event_idx) - else: - yield from self.__run(event_index) + + for event_idx in range(self._n_events_total): + yield self.__run(event_idx) + + + def read_event(self, event_index): + return self.__run(event_index) def end(self): diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index a52de7f9b..31051de69 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -2,6 +2,7 @@ import glob import os import random +import sys from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData from NuRadioReco.modules.base.module import register_run @@ -86,12 +87,14 @@ def begin(self, noise_folders, file_pattern="*", self._noise_reader = readRNOGData() selectors = [lambda einfo: einfo.triggerType == "FORCE"] self._noise_reader.begin(self.__noise_folders, selectors=selectors, log_level=log_level, mattak_backend=mattak_backend) - import time - t0 = time.time() - self._noise_events = [evt for evt in self._noise_reader.run()] - print(time.time() - t0) - + + self.logger.info("Get event informations ...") + # instead of reading all noise events into memory we only get certain information here and read all data in run() + noise_information = self._noise_reader.get_event_information_dict(keys=["station"]) + self.__event_index_list = np.array(list(noise_information.keys())) + self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) + def __get_noise_channel(self, channel_id): if self.__channel_mapping is None: @@ -104,18 +107,17 @@ def __get_noise_channel(self, channel_id): def run(self, evt, station, det): if self._match_station_id: - - station_ids = self._buffer_station_id_list() - mask = station_ids == station.get_id() - if not np.any(mask): + # select only noise events from simulated station id + station_mask = self.__station_id_list == station.get_id() + if not np.any(station_mask): raise ValueError(f"No station with id {station.get_id()} in noise data.") - - i_noise = np.random.choice(np.arange(len(mask))[mask]) - + else: - i_noise = np.random.randint(0, len(self._noise_events)) - - noise_event = self._noise_events[i_noise] + # select all noise events + station_mask = np.full_like(self.__event_index_list, True) + + i_noise = np.random.choice(self.__event_index_list[station_mask]) + noise_event = self._noise_reader.read_event(i_noise) station_id = noise_event.get_station_ids()[0] noise_station = noise_event.get_station(station_id) @@ -155,4 +157,5 @@ def run(self, evt, station, det): channel.set_trace(trace, channel.get_sampling_rate()) def end(self): + self._noise_reader.end() pass From 2d217d9702db05dd8ada60a44dfc8d9d353ee63e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 5 Apr 2023 19:02:09 +0200 Subject: [PATCH 027/102] Further optimizing the mattak reader. This commit should improve the performance when using the generator (i.e., the run() method) and in particular the uproot backend. 
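For context, a minimal sketch of the pre-selection pattern these optimizations target, using the method names as they exist at this point in the series (the data directory is a placeholder):

    from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData

    reader = readRNOGData()
    reader.begin("/path/to/station23/run325")

    # read only light-weight event information first ...
    info = reader.get_event_information_dict(keys=["station", "triggerType"])

    # ... select the events of interest ...
    forced_idx = [idx for idx, ei in info.items() if ei["triggerType"] == "FORCE"]

    # ... and read the full waveforms only for those
    events = [reader.read_event(idx) for idx in forced_idx]

This avoids holding every waveform in memory, which is exactly what the noiseImporter below relies on.
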
Add some logger info to the noiseImporter --- .../modules/io/rno_g/readRNOGDataMattak.py | 101 +++++++++++------- .../measured_noise/RNO_G/noiseImporter.py | 13 ++- 2 files changed, 74 insertions(+), 40 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 01ef01346..08b5f1587 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -276,6 +276,18 @@ def __get_dataset_for_event(self, event_idx): return dataset + def filter_event(self, evtinfo, event_idx=None): + if self._selectors is not None: + for selector in self._selectors: + if not selector(evtinfo): + self.logger.debug(f"Event {event_idx} (station {evtinfo.station}, run {evtinfo.run}, " + f"event number {evtinfo.eventNumber}) is skipped.") + self.__skipped += 1 + return True + + return False + + def get_event_information_dict(self, keys=["station", "run"]): data = {} @@ -287,17 +299,7 @@ def get_event_information_dict(self, keys=["station", "run"]): event_idx = idx + n_prev # event index accross all datasets combined - skip = False - if self._selectors is not None: - for selector in self._selectors: - if not selector(evtinfo): - self.logger.debug(f"Event {event_idx} (station {evtinfo.station}, run {evtinfo.run}, " - f"event number {evtinfo.eventNumber}) is skipped.") - skip = True - break - - if skip: - self.__skipped += 1 + if self.filter_event(evtinfo, event_idx): continue data[event_idx] = {key: getattr(evtinfo, key) for key in keys} @@ -305,25 +307,10 @@ def get_event_information_dict(self, keys=["station", "run"]): n_prev += dataset.N() return data - - def __run(self, event_idx): - """ Returns a single event with certain index """ - self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") - t0 = time.time() - - dataset = self.__get_dataset_for_event(event_idx) - event_info = dataset.eventInfo() # returns a single eventInfo - - if self._selectors is not None: - for selector in self._selectors: - if not selector(event_info): - self.logger.debug(f"Event {event_idx} (station {event_info.station}, run {event_info.run}, " - f"event number {event_info.eventNumber}) is skipped.") - self.__skipped += 1 - return None - + def get_event(self, event_info, waveforms): + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) station.set_station_time(astropy.time.Time(event_info.triggerTime, format='unix')) @@ -332,9 +319,6 @@ def __run(self, event_idx): trigger.set_triggered() trigger.set_trigger_time(event_info.triggerTime) station.set_trigger(trigger) - - # access data - waveforms = dataset.wfs() for channel_id, wf in enumerate(waveforms): channel = NuRadioReco.framework.channel.Channel(channel_id) @@ -360,9 +344,7 @@ def __run(self, event_idx): evt.set_station(station) - self._time_run += time.time() - t0 - self.__counter += 1 - return evt + return evt @register_run() @@ -375,13 +357,58 @@ def run(self): evt: NuRadioReco.framework.event """ + event_idx = -1 + for dataset in self._datasets: + dataset.setEntries((0, dataset.N())) + + # read all event infos of the entier dataset (= run) + event_infos = dataset.eventInfo() + wfs = None + + for idx, evtinfo in enumerate(event_infos): # returns a list + event_idx += 1 + + self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") + t0 = time.time() + + if self.filter_event(evtinfo, 
event_idx): + continue + + # Just read wfs if necessary + if wfs is None: + wfs = dataset.wfs() + + waveforms_of_event = wfs[idx] + + evt = self.get_event(evtinfo, waveforms_of_event) + + self._time_run += time.time() - t0 + self.__counter += 1 + + yield evt - for event_idx in range(self._n_events_total): - yield self.__run(event_idx) def read_event(self, event_index): - return self.__run(event_index) + + self.logger.debug(f"Processing event number {event_index} out of total {self._n_events_total}") + t0 = time.time() + + dataset = self.__get_dataset_for_event(event_index) + event_info = dataset.eventInfo() # returns a single eventInfo + + if self.filter_event(event_info, event_index): + return None + + # access data + waveforms = dataset.wfs() + + evt = self.get_event(event_info, waveforms) + + self._time_run += time.time() - t0 + self.__counter += 1 + + return evt def end(self): diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 31051de69..e018a9715 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -2,7 +2,7 @@ import glob import os import random -import sys +import collections from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData from NuRadioReco.modules.base.module import register_run @@ -55,7 +55,7 @@ def begin(self, noise_folders, file_pattern="*", a "fallback" to uproot is used. (Default: "auto") """ - self.logger = logging.getLogger('noiseImporter') + self.logger = logging.getLogger('NuRadioReco.RNOG.noiseImporter') self.logger.setLevel(log_level) self._match_station_id = match_station_id @@ -94,6 +94,8 @@ def begin(self, noise_folders, file_pattern="*", noise_information = self._noise_reader.get_event_information_dict(keys=["station"]) self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) + + self._n_use_event = collections.defaultdict(int) def __get_noise_channel(self, channel_id): @@ -116,7 +118,8 @@ def run(self, evt, station, det): # select all noise events station_mask = np.full_like(self.__event_index_list, True) - i_noise = np.random.choice(self.__event_index_list[station_mask]) + i_noise = np.random.choice(self.__event_index_list[station_mask]) + self._n_use_event[i_noise] += 1 noise_event = self._noise_reader.read_event(i_noise) station_id = noise_event.get_station_ids()[0] @@ -158,4 +161,8 @@ def run(self, evt, station, det): def end(self): self._noise_reader.end() + n_use = np.array(list(self._n_use_event.values())) + sort = np.flip(np.argsort(n_use)) + self.logger.info("\n\tThe five most used noise events have been used: {}" + .format(", ".join([str(ele) for ele in n_use[sort][:5]]))) pass From b098e28df7fc13e0a23688c7beb58a562e0c8ab9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 5 Apr 2023 19:26:40 +0200 Subject: [PATCH 028/102] Add doc strings to please the coding god --- .../modules/io/rno_g/readRNOGDataMattak.py | 96 ++++++++++++++++++- 1 file changed, 92 insertions(+), 4 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 08b5f1587..9ce6512e7 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -234,7 +234,16 @@ def begin(self, def __select_run(self, dataset): - """ Filter/select 
runs/datasets. Return True to select an dataset, return False to skip it """ + """ Filter/select runs/datasets. + + Parameters + ---------- + + dataset: mattak.Dataset.Dataset + + select: bool + Return True to select an dataset, return False to reject/skip it. + """ # get first eventInfo dataset.setEntries(0) @@ -259,13 +268,25 @@ def __select_run(self, dataset): def __get_n_events_of_prev_datasets(self, dataset_idx): - """ Get number of events from previous dataset to correctly set pointer """ + """ Get accumulated number of events from previous datasets """ dataset_idx_prev = dataset_idx - 1 return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 def __get_dataset_for_event(self, event_idx): - """ Set pointer to correct """ + """ Get correct dataset and set entry accordingly to event index + + Parameters + ---------- + + event_index: int + Same as in read_event(). + + Retruns + ------- + + dataset: mattak.Dataset.Dataset + """ # find correct dataset dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) dataset = self._datasets[dataset_idx] @@ -277,6 +298,23 @@ def __get_dataset_for_event(self, event_idx): def filter_event(self, evtinfo, event_idx=None): + """ Filter an event base on its EventInfo and the configured selectors. + + Parameters + ---------- + + event_info: mattak.Dataset.EventInfo + The event info object for one event. + + event_index: int + Same as in read_event(). Only use for logger.info(). (Default: None) + + Returns + ------- + + skip: bool + Returns True to skip/reject event, return False to keep/read event + """ if self._selectors is not None: for selector in self._selectors: if not selector(evtinfo): @@ -289,6 +327,26 @@ def filter_event(self, evtinfo, event_idx=None): def get_event_information_dict(self, keys=["station", "run"]): + """ Return information of all events from the EventInfo + + This function is useful to make a pre-selection of events before actually reading them in combination with + self.read_event(). + + Parameters + ---------- + + keys: list(str) + List of the information to receive from each event. Have to match the attributes (member variables) + of the mattak.Dataset.EventInfo class (examples are "station", "run", "triggerTime", "triggerType", "eventNumber", ...). + (Default: ["station", "run"]) + + Returns + ------- + + data: dict + Keys of the dict are the event indecies (as used in self.read_event(event_index)). The values are dictinaries + them self containing the information specified with "keys" parameter. + """ data = {} n_prev = 0 @@ -310,6 +368,22 @@ def get_event_information_dict(self, keys=["station", "run"]): def get_event(self, event_info, waveforms): + """ Return a NuRadioReco event + + Parameters + ---------- + + event_info: mattak.Dataset.EventInfo + The event info object for one event. + + waveforms: np.array(n_channel, n_samples) + Typically what dataset.wfs() returns (for one event!) + + Returns + ------- + + evt: NuRadioReco.framework.event + """ evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) @@ -355,7 +429,7 @@ def run(self): Returns ------- - evt: NuRadioReco.framework.event + evt: generator(NuRadioReco.framework.event) """ event_idx = -1 for dataset in self._datasets: @@ -390,6 +464,20 @@ def run(self): def read_event(self, event_index): + """ Allows to read a specific event identifed by its index + + Parameters + ---------- + + event_index: int + The index of a particluar event. 
The index is the chronological number from 0 to + number of total events (across all datasets). + + Returns + ------- + + evt: NuRadioReco.framework.event + """ self.logger.debug(f"Processing event number {event_index} out of total {self._n_events_total}") t0 = time.time() From 758c7eff539149dad60b83002cdc63933218b033 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 6 Apr 2023 17:44:56 +0200 Subject: [PATCH 029/102] Details: Allow to import run summary table from a cvs file. Add run selection based on time. Change interface of noiseImporter to accept dict for reader arguments --- .../modules/io/rno_g/readRNOGDataMattak.py | 77 ++++++++++++++----- .../measured_noise/RNO_G/noiseImporter.py | 18 +++-- 2 files changed, 71 insertions(+), 24 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 9ce6512e7..f8bbba1f2 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -94,7 +94,9 @@ def begin(self, convert_to_voltage=True, select_triggers=None, select_runs=True, + run_table_path=None, run_types=["physics"], + run_time_range=None, max_trigger_rate=1 * units.Hz, mattak_backend="auto"): @@ -112,8 +114,7 @@ def begin(self, selectors: list of lambdas List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. Example: - - trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" + trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" read_calibrated_data: bool If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. @@ -130,9 +131,18 @@ def begin(self, select_runs: bool Select runs + run_table_path: str + Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) + run_types: list Used to select/reject runs from information in the RNO-G RunTable. List of run_types to be used. (Default: ['physics']) + run_time_range: tuple + Specify a time range to select runs (it is sufficient that runs cover the time range partially). + Each value of the tuple has to be in a format which astropy.time.Time understands. A value can be None + which means that the lower or upper bound is unconstrained. If run_time_range is None no time selection is + applied. (Default: None) + max_trigger_rate: float Used to select/reject runs from information in the RNO-G RunTable. Maximum allowed trigger rate (per run) in Hz. If 0, no cut is applied. (Default: 1 Hz) @@ -156,23 +166,37 @@ def begin(self, self._adc_ref_voltage_range = 2.5 * units.volt self._adc_n_bits = 12 - if select_runs: - self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + - f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz") - self.__max_trigger_rate = max_trigger_rate self.__run_types = run_types - global imported_runtable - if imported_runtable: - self.logger.debug("Access RunTable database ...") - try: - self.__run_table = RunTable().get_table() - except: - self.logger.error("No connect to RunTable database could be established. 
" - "Runs will not be filtered.") - imported_runtable = False - + if run_time_range is not None: + convert_time = lambda t: None if t is None else astropy.time.Time(t) + self._time_low = convert_time(run_time_range[0]) + self._time_high = convert_time(run_time_range[1]) + else: + self._time_low = None + self._time_high = None + + if select_runs: + if run_table_path is None: + global imported_runtable + if imported_runtable: + self.logger.debug("Access RunTable database ...") + try: + self.__run_table = RunTable().get_table() + except: + self.logger.error("No connect to RunTable database could be established. " + "Runs will not be filtered.") + imported_runtable = False + else: + self.__run_table = pandas.read_csv(run_table_path) + imported_runtable = True + + if select_runs: + self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + + f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz" + f"\n\tSelect runs which are between {self._time_low} - {self._time_high}") + if not isinstance(data_dirs, (list, np.ndarray)): data_dirs = [data_dirs] @@ -209,7 +233,7 @@ def begin(self, self.logger.error(f"The directory {data_dir} does not exist") dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend) - + # filter runs/datasets based on if select_runs and imported_runtable and not self.__select_run(dataset): self.__skipped_runs += 1 @@ -224,6 +248,8 @@ def begin(self, self._n_events_total = np.sum(self.__n_events_per_dataset) self._time_begin = time.time() - t0 + self.logger.info(f"{self._n_events_total} events in {len(self._datasets)} runs/datasets have been found.") + # Variable not yet implemented in mattak # self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.") @@ -253,8 +279,23 @@ def __select_run(self, dataset): station_id = event_info.station run_info = self.__run_table.query(f"station == {station_id:d} & run == {run_id:d}") - run_type = run_info["run_type"].values[0] + # "time_start/end" is stored in the isot format. datetime is much faster than astropy (~85ns vs 55 mus). + # But using datetime would mean to stip decimals because datetime can only handle mu sec precision and can not cope + # with the additional decimals for ns. 
+ if self._time_low is not None: + time_end = astropy.time.Time(run_info["time_end"].values[0]) + if time_end < self._time_low: + self.logger.info(f"Reject station {station_id} run {run_id} because run ended before {self._time_low}") + return False + + if self._time_high is not None: + time_start = astropy.time.Time(run_info["time_start"].values[0]) + if time_start > self._time_high: + self.logger.info(f"Reject station {station_id} run {run_id} because run started time after {self._time_high}") + return False + + run_type = run_info["run_type"].values[0] if not run_type in self.__run_types: self.logger.info(f"Reject station {station_id} run {run_id} because of run type {run_type}") return False diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index e018a9715..d046a37fd 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -21,7 +21,7 @@ class noiseImporter: def begin(self, noise_folders, file_pattern="*", match_station_id=False, station_ids=None, channel_mapping=None, scramble_noise_file_order=True, - log_level=logging.INFO, mattak_backend="auto"): + log_level=logging.INFO, reader_kwargs={}): """ Parameters @@ -50,9 +50,8 @@ def begin(self, noise_folders, file_pattern="*", log_level: loggging log level the log level, default logging.INFO - mattak_backend: str - Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise - a "fallback" to uproot is used. (Default: "auto") + reader_kwargs: dict + Optional arguements passed to readRNOGDataMattak """ self.logger = logging.getLogger('NuRadioReco.RNOG.noiseImporter') @@ -84,9 +83,16 @@ def begin(self, noise_folders, file_pattern="*", if scramble_noise_file_order: random.shuffle(self.__noise_folders) - self._noise_reader = readRNOGData() + if "log_level" in reader_kwargs: + log_level_reader = reader_kwargs.pop("log_level") + else: + log_level_reader = log_level + + self._noise_reader = readRNOGData() selectors = [lambda einfo: einfo.triggerType == "FORCE"] - self._noise_reader.begin(self.__noise_folders, selectors=selectors, log_level=log_level, mattak_backend=mattak_backend) + self._noise_reader.begin(self.__noise_folders, selectors=selectors, + log_level=log_level_reader, + **reader_kwargs) self.logger.info("Get event informations ...") From 300ffceefcc4702e8116d81cbb29f7ac98f16118 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 6 Apr 2023 17:49:30 +0200 Subject: [PATCH 030/102] Add explicit dtpe conversion --- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index d046a37fd..6f9460716 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -97,13 +97,14 @@ def begin(self, noise_folders, file_pattern="*", self.logger.info("Get event informations ...") # instead of reading all noise events into memory we only get certain information here and read all data in run() + noise_information = self._noise_reader.get_event_information_dict(keys=["station"]) self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) 
self._n_use_event = collections.defaultdict(int) - - + + def __get_noise_channel(self, channel_id): if self.__channel_mapping is None: return channel_id @@ -124,7 +125,8 @@ def run(self, evt, station, det): # select all noise events station_mask = np.full_like(self.__event_index_list, True) - i_noise = np.random.choice(self.__event_index_list[station_mask]) + # int(..) necessary because pyroot can not handle np.int64 + i_noise = int(np.random.choice(self.__event_index_list[station_mask])) self._n_use_event[i_noise] += 1 noise_event = self._noise_reader.read_event(i_noise) From 3c5f26994fa1045771887c1377c72b8541cc82ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 6 Apr 2023 18:02:17 +0200 Subject: [PATCH 031/102] small import fix --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index f8bbba1f2..dd75ba78c 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -1,5 +1,5 @@ import numpy as np - +import pandas import logging import os import time @@ -17,7 +17,6 @@ try: from rnog_data.runtable import RunTable - import pandas imported_runtable = True except ImportError: print("Import of run table failed. You will not be able to select runs! \n" From f0b50a01df3259c564dd51216019fff23fc1fc56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 12 Apr 2023 11:12:23 +0200 Subject: [PATCH 032/102] Add docstrings for one last function --- .../modules/io/rno_g/readRNOGDataMattak.py | 48 +++++++++++++------ 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index dd75ba78c..06cd576a9 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -24,23 +24,43 @@ imported_runtable = False -def baseline_correction(wfs, n_bins=128): +def baseline_correction(wfs, n_bins=128, func=np.median): + """ + Simple baseline correction function. Determines baseline in discrete chuncks of "n_bins" with + the function specified (i.e., mean or median). + + Parameters + ---------- + + wfs: np.array(n_events, n_channels, n_samples) + Waveforms of several events/channels. + + n_bins: int + Number of samples/bins in one "chunck". If None, calculate median/mean over entire trace. 
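In short, the chunked pedestal subtraction implemented below boils down to the following condensed sketch (it ignores the n_bins=None branch and assumes 2048-sample waveforms as in the implementation):

    import numpy as np

    def baseline_correction_sketch(wfs, n_bins=128, func=np.median):
        # wfs has shape (n_events, n_channels, 2048)
        chunks = np.split(wfs, wfs.shape[-1] // n_bins, axis=-1)  # 16 chunks of 128 samples (with the defaults)
        pedestals = func(chunks, axis=-1)                         # (16, n_events, n_channels)
        baseline = np.repeat(pedestals, n_bins, axis=0)           # (2048, n_events, n_channels)
        return wfs - np.moveaxis(baseline, 0, -1)                 # back to (n_events, n_channels, 2048)
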
(Default: 128) + + func: np.mean or np.median + Function to calculate pedestal + + Returns + ------- + + wfs_corrected: np.array(n_events, n_channels, n_samples) + Baseline/pedestal corrected waveforms + """ - # Get baseline in chunks of 128 bins - # np.split -> (16, n_events, n_channels, 128) - # np.mean -> (16, n_events, n_channels) + # Example: Get baselines in chunks of 128 bins + # wfs in (n_events, n_channels, 2048) + # np.split -> (16, n_events, n_channels, 128) each waveform split in 16 chuncks + # func -> (16, n_events, n_channels) pedestal for each chunck if n_bins is not None: - medians = np.median(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) - - # Get baseline traces - # np.repeat -> (2048, n_events, n_channels) - baseline_traces = np.repeat(medians, n_bins % 2048, axis=0) + baseline_values = func(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) + + # np.repeat -> (2048, n_events, n_channels) concatenate the 16 chuncks to one baseline + baseline_traces = np.repeat(baseline_values, n_bins % 2048, axis=0) else: - medians = np.median(wfs, axis=-1) - - # Get baseline traces - # np.repeat -> (2048, n_events, n_channels) - baseline_traces = np.repeat(medians, 2048, axis=0) + baseline_values = func(wfs, axis=-1) + # np.repeat -> (2048, n_events, n_channels) concatenate the 16 chuncks to one baseline + baseline_traces = np.repeat(baseline_values, 2048, axis=0) # np.moveaxis -> (n_events, n_channels, 2048) baseline_traces = np.moveaxis(baseline_traces, 0, -1) From 4308690b4c78f2debf03ae9d085d6ca4f26ce9fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 12 Apr 2023 11:57:56 +0200 Subject: [PATCH 033/102] Add pandas to dependencies --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index d936b0ee8..577a24204 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,7 @@ toml = ">=0.10.2" uproot = "4.1.1" importlib-metadata = {version = ">=4.8.1", python = "<3.8"} numba = "*" +pandas = "*" [tool.poetry.dev-dependencies] Sphinx = "*" From b1dda670b76dec90bbe0733be3578326b8f07e9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 12 Apr 2023 12:03:41 +0200 Subject: [PATCH 034/102] Fix docstring --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 06cd576a9..2624c47e8 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -118,12 +118,11 @@ def begin(self, run_time_range=None, max_trigger_rate=1 * units.Hz, mattak_backend="auto"): - """ - + Parameters ---------- - + data_dirs: list of strings / string Path to run directories (i.e. ".../stationXX/runXXX/") @@ -132,13 +131,12 @@ def begin(self, selectors: list of lambdas List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. - Example: - trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" + Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" read_calibrated_data: bool If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. (temp. Default: False) - + apply_baseline_correction: bool Only applies when non-calibrated data are read. If true, correct for DC offset. 
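For reference, the rough ADC-to-voltage conversion applied when convert_to_voltage is enabled amounts to the following worked example (using the values hard-coded in this reader, a 12-bit ADC and a 2.5 V reference range):

    import numpy as np

    adc_counts = np.array([0, 1024, 4095])
    voltage = adc_counts * 2.5 / (2 ** 12 - 1)   # -> [0., ~0.625, 2.5] volt
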
(Default: True) From eb351d4a39f8b5cafba91a9ed984362db45cfe65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= <30903175+fschlueter@users.noreply.github.com> Date: Wed, 12 Apr 2023 14:11:46 +0200 Subject: [PATCH 035/102] Update readRNOGDataMattak.py Update doctoring --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 2624c47e8..e072fa578 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -146,7 +146,8 @@ def begin(self, (Default: True) select_runs: bool - Select runs + If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). If the run_table is + not available no selection is performed (and the programm is not interrupted, only an error message is raised). (Default: True) run_table_path: str Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) @@ -563,4 +564,4 @@ def end(self): f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" f"\n\tTime to initialize all events : {self._time_run:.2f}s" f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" - f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") \ No newline at end of file + f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") From 657cf6c12590bd3c6e88f1e944bc1eccda79412c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= <30903175+fschlueter@users.noreply.github.com> Date: Wed, 12 Apr 2023 14:54:14 +0200 Subject: [PATCH 036/102] Update readRNOGDataMattak.py Update trigger specific time offset base on plots by Steffen --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index e072fa578..19d391d14 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -75,6 +75,8 @@ def get_time_offset(trigger_type): information might be time, station, and channel dependent and should come from a database (or is already calibrated in mattak) + Current values motivated by figures posted in PR https://github.com/nu-radio/NuRadioMC/pull/519 + Parameters ---------- @@ -91,8 +93,8 @@ def get_time_offset(trigger_type): time_offsets = { "FORCE": 0, - "LT": 213 * units.ns, # ~ 1 / 3 of trace @ 2048 sample with 3.2 GSa/s - "RADIANT": 320 * units.ns # ~ 1 / 2 of trace @ 2048 sample with 3.2 GSa/s + "LT": 250 * units.ns, + "RADIANT": 475 * units.ns } if trigger_type.startswith("RADIANT"): From fdd3dc8f5fe1f207fa17f431a9d51804d0ede416 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 21 Apr 2023 13:18:25 +0200 Subject: [PATCH 037/102] Small fix in begin() --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 19d391d14..d837fdc7b 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -223,6 +223,10 @@ def begin(self, if selectors is not None: if not isinstance(selectors, (list, np.ndarray)): selectors = [selectors] + + 
self.logger.info(f"Found {len(selectors)} selector(s)") + + self._selectors = selectors if select_triggers is not None: if isinstance(select_triggers, str): @@ -230,9 +234,6 @@ def begin(self, else: for select_trigger in select_triggers: selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) - - self._selectors = selectors - self.logger.info(f"Found {len(self._selectors)} selector(s)") self._time_begin = 0 self._time_run = 0 From fe2d69f860fef9ed280c46321c5d095b08aa98c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 21 Apr 2023 13:48:31 +0200 Subject: [PATCH 038/102] Rename functions, implement get_event(event_id) method --- .../modules/io/rno_g/readRNOGDataMattak.py | 104 ++++++++++++++---- .../measured_noise/RNO_G/noiseImporter.py | 2 +- 2 files changed, 85 insertions(+), 21 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index d837fdc7b..2c0ac3098 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -240,6 +240,7 @@ def begin(self, self.__counter = 0 self.__skipped = 0 + self._event_informations = None self._datasets = [] self.__n_events_per_dataset = [] @@ -344,7 +345,7 @@ def __get_dataset_for_event(self, event_idx): event_index: int Same as in read_event(). - Retruns + Returns ------- dataset: mattak.Dataset.Dataset @@ -388,7 +389,7 @@ def filter_event(self, evtinfo, event_idx=None): return False - def get_event_information_dict(self, keys=["station", "run"]): + def get_event_informations(self, keys=["station", "run", "eventNumber"]): """ Return information of all events from the EventInfo This function is useful to make a pre-selection of events before actually reading them in combination with @@ -400,7 +401,7 @@ def get_event_information_dict(self, keys=["station", "run"]): keys: list(str) List of the information to receive from each event. Have to match the attributes (member variables) of the mattak.Dataset.EventInfo class (examples are "station", "run", "triggerTime", "triggerType", "eventNumber", ...). - (Default: ["station", "run"]) + (Default: ["station", "run", "eventNumber"]) Returns ------- @@ -410,26 +411,39 @@ def get_event_information_dict(self, keys=["station", "run"]): them self containing the information specified with "keys" parameter. """ - data = {} - n_prev = 0 - for dataset in self._datasets: - dataset.setEntries((0, dataset.N())) - - for idx, evtinfo in enumerate(dataset.eventInfo()): # returns a list + # Read if dict is None ... + do_read = self._event_informations is None - event_idx = idx + n_prev # event index accross all datasets combined - - if self.filter_event(evtinfo, event_idx): - continue - - data[event_idx] = {key: getattr(evtinfo, key) for key in keys} + if not do_read: + # ... 
or when it does not have the desired information + first_event_info = next(iter(self._event_informations)) + print(first_event_info) + for key in keys: + if key not in list(first_event_info.keys()): + do_read = True + + if do_read: + + self._event_informations = {} + n_prev = 0 + for dataset in self._datasets: + dataset.setEntries((0, dataset.N())) + + for idx, evtinfo in enumerate(dataset.eventInfo()): # returns a list - n_prev += dataset.N() + event_idx = idx + n_prev # event index accross all datasets combined + + if self.filter_event(evtinfo, event_idx): + continue + + self._event_informations[event_idx] = {key: getattr(evtinfo, key) for key in keys} + + n_prev += dataset.N() - return data + return self._event_informations - def get_event(self, event_info, waveforms): + def _get_event(self, event_info, waveforms): """ Return a NuRadioReco event Parameters @@ -516,7 +530,7 @@ def run(self): waveforms_of_event = wfs[idx] - evt = self.get_event(evtinfo, waveforms_of_event) + evt = self._get_event(evtinfo, waveforms_of_event) self._time_run += time.time() - t0 self.__counter += 1 @@ -553,12 +567,62 @@ def read_event(self, event_index): # access data waveforms = dataset.wfs() - evt = self.get_event(event_info, waveforms) + evt = self._get_event(event_info, waveforms) self._time_run += time.time() - t0 self.__counter += 1 return evt + + + def get_event(self, event_id): + """ Allows to read a specific event identifed by its id + + Parameters + ---------- + + event_id: int + Event Id + + Returns + ------- + + evt: NuRadioReco.framework.event + """ + + self.logger.debug(f"Processing event {event_id}") + t0 = time.time() + + event_infos = self.get_event_informations(keys=["eventNumber"]) + event_idx_ids = np.array([[index, ele["eventNumber"]] for index, ele in event_infos.items()]) + mask = event_idx_ids[:, 1] == event_id + + if not np.any(mask): + self.logger.info(f"Could not find event with id: {event_id}.") + return None + elif np.sum(mask) > 1: + self.logger.error(f"Found several events with the same id: {event_id}.") + raise ValueError(f"Found several events with the same id: {event_id}.") + else: + pass + + event_index = event_idx_ids[mask, 0][0] + + dataset = self.__get_dataset_for_event(event_index) + event_info = dataset.eventInfo() # returns a single eventInfo + + if self.filter_event(event_info, event_index): + return None + + # access data + waveforms = dataset.wfs() + + evt = self._get_event(event_info, waveforms) + + self._time_run += time.time() - t0 + self.__counter += 1 + + return evt def end(self): diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 6f9460716..a65617887 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -98,7 +98,7 @@ def begin(self, noise_folders, file_pattern="*", self.logger.info("Get event informations ...") # instead of reading all noise events into memory we only get certain information here and read all data in run() - noise_information = self._noise_reader.get_event_information_dict(keys=["station"]) + noise_information = self._noise_reader.get_event_informations(keys=["station"]) self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) From 22ba95814a3c155881bdbc63c3dd8da48b91d825 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 21 Apr 2023 14:08:02 +0200 
Subject: [PATCH 039/102] Improve doc string --- .../modules/io/rno_g/readRNOGDataMattak.py | 42 ++++++++++++------- 1 file changed, 26 insertions(+), 16 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 2c0ac3098..a2523d29e 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -109,17 +109,19 @@ def get_time_offset(trigger_type): class readRNOGData: def begin(self, - data_dirs, log_level=logging.INFO, selectors=None, + data_dirs, read_calibrated_data=False, - apply_baseline_correction=True, - convert_to_voltage=True, select_triggers=None, select_runs=True, + apply_baseline_correction=True, + convert_to_voltage=True, + selectors=None, run_table_path=None, run_types=["physics"], run_time_range=None, max_trigger_rate=1 * units.Hz, - mattak_backend="auto"): + mattak_backend="auto", + log_level=logging.INFO): """ Parameters @@ -128,17 +130,22 @@ def begin(self, data_dirs: list of strings / string Path to run directories (i.e. ".../stationXX/runXXX/") - log_level: enum - Set verbosity level of logger - - selectors: list of lambdas - List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. - Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" - read_calibrated_data: bool If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. (temp. Default: False) - + + select_triggers: str or list(str) + Names of triggers which should be selected. Convinence interface instead of passing a selector + (see "selectors" below. (Default: None) + + select_runs: bool + If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). + If the run_table is not available no selection is performed (and the programm is not interrupted, + only an error message is raised). See parameters to configure run selection. (Default: True) + + Other Parameters + ---------------- + apply_baseline_correction: bool Only applies when non-calibrated data are read. If true, correct for DC offset. (Default: True) @@ -146,10 +153,10 @@ def begin(self, convert_to_voltage: bool Only applies when non-calibrated data are read. If true, convert ADC to voltage. (Default: True) - - select_runs: bool - If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). If the run_table is - not available no selection is performed (and the programm is not interrupted, only an error message is raised). (Default: True) + + selectors: list of lambdas + List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. + Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" run_table_path: str Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) @@ -170,6 +177,9 @@ def begin(self, mattak_backend: str Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise a "fallback" to uproot is used. (Default: "auto") + + log_level: enum + Set verbosity level of logger """ t0 = time.time() From 9b8528dbeeff8b48e7b5f3b112353e7086640b2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Sun, 23 Apr 2023 17:38:30 +0200 Subject: [PATCH 040/102] Change default max. 
trigger rate --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index a2523d29e..3103f1f3a 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -119,7 +119,7 @@ def begin(self, run_table_path=None, run_types=["physics"], run_time_range=None, - max_trigger_rate=1 * units.Hz, + max_trigger_rate=0 * units.Hz, mattak_backend="auto", log_level=logging.INFO): """ From 2740c072985e7bb8af93f2632fba8aef1d9ea221 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Sun, 23 Apr 2023 17:39:03 +0200 Subject: [PATCH 041/102] Rename function and variable --- .../modules/io/rno_g/readRNOGDataMattak.py | 16 ++++++++-------- .../measured_noise/RNO_G/noiseImporter.py | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 3103f1f3a..807ae6801 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -250,7 +250,7 @@ def begin(self, self.__counter = 0 self.__skipped = 0 - self._event_informations = None + self._events_information = None self._datasets = [] self.__n_events_per_dataset = [] @@ -399,7 +399,7 @@ def filter_event(self, evtinfo, event_idx=None): return False - def get_event_informations(self, keys=["station", "run", "eventNumber"]): + def get_events_information(self, keys=["station", "run", "eventNumber"]): """ Return information of all events from the EventInfo This function is useful to make a pre-selection of events before actually reading them in combination with @@ -422,11 +422,11 @@ def get_event_informations(self, keys=["station", "run", "eventNumber"]): """ # Read if dict is None ... - do_read = self._event_informations is None + do_read = self._events_information is None if not do_read: # ... 
or when it does not have the desired information - first_event_info = next(iter(self._event_informations)) + first_event_info = next(iter(self._events_information)) print(first_event_info) for key in keys: if key not in list(first_event_info.keys()): @@ -434,7 +434,7 @@ def get_event_informations(self, keys=["station", "run", "eventNumber"]): if do_read: - self._event_informations = {} + self._events_information = {} n_prev = 0 for dataset in self._datasets: dataset.setEntries((0, dataset.N())) @@ -446,11 +446,11 @@ def get_event_informations(self, keys=["station", "run", "eventNumber"]): if self.filter_event(evtinfo, event_idx): continue - self._event_informations[event_idx] = {key: getattr(evtinfo, key) for key in keys} + self._events_information[event_idx] = {key: getattr(evtinfo, key) for key in keys} n_prev += dataset.N() - return self._event_informations + return self._events_information def _get_event(self, event_info, waveforms): @@ -603,7 +603,7 @@ def get_event(self, event_id): self.logger.debug(f"Processing event {event_id}") t0 = time.time() - event_infos = self.get_event_informations(keys=["eventNumber"]) + event_infos = self.get_events_information(keys=["eventNumber"]) event_idx_ids = np.array([[index, ele["eventNumber"]] for index, ele in event_infos.items()]) mask = event_idx_ids[:, 1] == event_id diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index a65617887..9ae0955de 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -98,7 +98,7 @@ def begin(self, noise_folders, file_pattern="*", self.logger.info("Get event informations ...") # instead of reading all noise events into memory we only get certain information here and read all data in run() - noise_information = self._noise_reader.get_event_informations(keys=["station"]) + noise_information = self._noise_reader.get_events_information(keys=["station"]) self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) From d4ba05deb6a3ed91ffed44c5ba03d970f966cdc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Sun, 23 Apr 2023 17:40:22 +0200 Subject: [PATCH 042/102] Make function privat --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 807ae6801..15f03eee7 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -370,7 +370,7 @@ def __get_dataset_for_event(self, event_idx): return dataset - def filter_event(self, evtinfo, event_idx=None): + def _filter_event(self, evtinfo, event_idx=None): """ Filter an event base on its EventInfo and the configured selectors. 
Parameters @@ -443,7 +443,7 @@ def get_events_information(self, keys=["station", "run", "eventNumber"]): event_idx = idx + n_prev # event index accross all datasets combined - if self.filter_event(evtinfo, event_idx): + if self._filter_event(evtinfo, event_idx): continue self._events_information[event_idx] = {key: getattr(evtinfo, key) for key in keys} @@ -531,7 +531,7 @@ def run(self): self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") t0 = time.time() - if self.filter_event(evtinfo, event_idx): + if self._filter_event(evtinfo, event_idx): continue # Just read wfs if necessary @@ -571,7 +571,7 @@ def read_event(self, event_index): dataset = self.__get_dataset_for_event(event_index) event_info = dataset.eventInfo() # returns a single eventInfo - if self.filter_event(event_info, event_index): + if self._filter_event(event_info, event_index): return None # access data @@ -621,7 +621,7 @@ def get_event(self, event_id): dataset = self.__get_dataset_for_event(event_index) event_info = dataset.eventInfo() # returns a single eventInfo - if self.filter_event(event_info, event_index): + if self._filter_event(event_info, event_index): return None # access data From 9ad3bb74b2a82148647e084cbfb5cd57b17ffa17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Sun, 23 Apr 2023 17:44:23 +0200 Subject: [PATCH 043/102] Rename function --- NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py | 2 +- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py index 15f03eee7..043051ba2 100644 --- a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py @@ -549,7 +549,7 @@ def run(self): - def read_event(self, event_index): + def get_event_by_index(self, event_index): """ Allows to read a specific event identifed by its index Parameters diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 9ae0955de..eae85ea2d 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -128,7 +128,7 @@ def run(self, evt, station, det): # int(..) 
necessary because pyroot can not handle np.int64 i_noise = int(np.random.choice(self.__event_index_list[station_mask])) self._n_use_event[i_noise] += 1 - noise_event = self._noise_reader.read_event(i_noise) + noise_event = self._noise_reader.get_event_by_index(i_noise) station_id = noise_event.get_station_ids()[0] noise_station = noise_event.get_station(station_id) From cfaea77b3496f7441cb27a03a7cc9aa564cf20ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Sun, 23 Apr 2023 17:50:18 +0200 Subject: [PATCH 044/102] Correct import after renaming --- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index eae85ea2d..16fa0cfe5 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -4,7 +4,7 @@ import random import collections -from NuRadioReco.modules.io.rno_g.readRNOGDataMattak import readRNOGData +from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import readRNOGData from NuRadioReco.modules.base.module import register_run from NuRadioReco.utilities import units From 8bef940016eca205e11ada6c324543ae61e6b5fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Sun, 23 Apr 2023 17:55:11 +0200 Subject: [PATCH 045/102] Renamed directory, removed old reader classes/files --- .../modules/io/{rno_g => RNO_G}/__init__.py | 0 .../io/{rno_g => RNO_G}/readRNOGDataMattak.py | 0 NuRadioReco/modules/io/rno_g/readRNOGData.py | 224 ------------------ .../modules/io/rno_g/rnogDataReader.py | 126 ---------- 4 files changed, 350 deletions(-) rename NuRadioReco/modules/io/{rno_g => RNO_G}/__init__.py (100%) rename NuRadioReco/modules/io/{rno_g => RNO_G}/readRNOGDataMattak.py (100%) delete mode 100755 NuRadioReco/modules/io/rno_g/readRNOGData.py delete mode 100644 NuRadioReco/modules/io/rno_g/rnogDataReader.py diff --git a/NuRadioReco/modules/io/rno_g/__init__.py b/NuRadioReco/modules/io/RNO_G/__init__.py similarity index 100% rename from NuRadioReco/modules/io/rno_g/__init__.py rename to NuRadioReco/modules/io/RNO_G/__init__.py diff --git a/NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py similarity index 100% rename from NuRadioReco/modules/io/rno_g/readRNOGDataMattak.py rename to NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py diff --git a/NuRadioReco/modules/io/rno_g/readRNOGData.py b/NuRadioReco/modules/io/rno_g/readRNOGData.py deleted file mode 100755 index e1fc23d08..000000000 --- a/NuRadioReco/modules/io/rno_g/readRNOGData.py +++ /dev/null @@ -1,224 +0,0 @@ -import NuRadioReco.framework.event -from NuRadioReco.modules.base.module import register_run -import NuRadioReco.framework.station -import NuRadioReco.framework.channel -import NuRadioReco.framework.trigger -import NuRadioReco.modules.channelSignalReconstructor -signal_reconstructor = NuRadioReco.modules.channelSignalReconstructor.channelSignalReconstructor() - -import uproot -# default uproot readout is awkward arrays, but requires awkward package installed. RNOG data format does not require this. Use numpy instead. 
-uproot.default_library = "np" - -import numpy as np -from NuRadioReco.utilities import units -import sys -import os -import logging -import time -from scipy import interpolate -import six -from collections import OrderedDict -import astropy.time - - -class readRNOGData: - """ - This is the data reader for RNO-G. Reads RNO-G data from ROOT format using uproot - """ - def __init__(self): - self.logger = logging.getLogger("NuRadioReco.readRNOGdata") - self.__id_current_event = None - self.__t = None - self.__sampling_rate = 3.2 * units.GHz #TODO: 3.2 at the beginning of deployment. Will change to 2.4 GHz after firmware update eventually, but info not yet contained in the .root files. Read out once available. - self._iterator_data = None - self._iterator_header = None - self._data_treename = "waveforms" - self._header_treename = "header" - self.n_events = None - self.input_files = [] - - def begin(self, input_files, input_files_header=None): - - """ - Begin function of the RNO-G reader - - Parameters - ---------- - input_files: list of paths to files containing waveforms - input_files_header: list of paths to files containing header, - if None, headers are expected to be stored in the input_files also - """ - - self.__id_current_event = -1 - self.__t = time.time() - - if isinstance(input_files, six.string_types): - input_files = [input_files] - if isinstance(input_files_header, six.string_types): - input_files_header = [input_files_header] - if input_files_header is None: - input_files_header = input_files - - self.input_files = input_files - self.input_files_header = input_files_header - - self.n_events = 0 - # get the total number of events of all input files - for filename in input_files: - file = uproot.open(filename) - if 'combined' in file: - file = file['combined'] - self.n_events += file[self._data_treename].num_entries - self._set_iterators() - - return self.n_events - - def _set_iterators(self, cut=None): - """ - Set uproot iterators to loop over event trees - - Parameters - ---------- - cut: str - cut string to apply (e.g. for initial event selection based on event_number, ... - e.g. 
"(event_number==1)" or "(run_number==1)&(event_number<10)" - """ - self.__id_current_event = -1 - - datadict = OrderedDict() - for filename in self.input_files: - if 'combined' in uproot.open(filename): - datadict[filename] = 'combined/' + self._data_treename - else: - datadict[filename] = self._data_treename - - headerdict = OrderedDict() - for filename in self.input_files_header: - if 'combined' in uproot.open(filename): - headerdict[filename] = 'combined/' + self._header_treename - else: - headerdict[filename] = self._header_treename - - # iterator over single events (step 1), for event looping in NuRadioReco dataformat - # may restrict which data to read in the iterator by adding second argument - # read_branches = ['run_number', 'event_number', 'station_number', 'radiant_data[24][2048]'] - self._iterator_data = uproot.iterate(datadict, cut=cut,step_size=1, how=dict, library="np") - self._iterator_header = uproot.iterate(headerdict, cut=cut, step_size=1, how=dict, library="np") - - self.uproot_iterator_data = uproot.iterate(datadict, cut=cut, step_size=1000) - self.uproot_iterator_header = uproot.iterate(headerdict, cut=cut, step_size=1000) - - @register_run() - def run(self, channels=np.arange(24), event_numbers=None, run_numbers=None, cut_string=None): - """ - Run function of the RNOG reader - - Parameters - ---------- - n_channels: int - number of RNOG channels to loop over, default 24 - - event_numbers: None or dict - if dict, use a dict with run number as key and list of event numbers as items - - run_numbers: None or list - list of run numbers to select - Caveat: use only if event_numbers are not set - - cut_string: string - selection string for event pre-selection - Cavieat: use only if event_numbers and run_numbers are not set - """ - - # generate cut string based on passed event_numbers or run_numbers parameters - if not run_numbers is None: - event_cuts = "|".join(["(run_number==%i)" for run_number in run_numbers]) - cut_string = "|".join(event_cuts) - if not event_numbers is None: - event_cuts = [] - for run in event_numbers: - events = event_numbers[run] - for event in events: - event_cuts.append("(run_number==%i)&(event_number==%i)" %(run, event)) - cut_string = "|".join(event_cuts) - self.cut_string = cut_string - - self._set_iterators(cut=self.cut_string) - root_trigger_keys = [ - 'trigger_info.rf_trigger', 'trigger_info.force_trigger', - 'trigger_info.pps_trigger', 'trigger_info.ext_trigger', - 'trigger_info.radiant_trigger', 'trigger_info.lt_trigger', - 'trigger_info.surface_trigger' - ] - self.__t = time.time() - # Note: reading single events is inefficient... - # for event_header, event in zip(self._iterator_header, self._iterator_data): - for event_headers, events in zip(self.uproot_iterator_header, self.uproot_iterator_data): - for event_header, event in zip(event_headers, events): - self.__id_current_event += 1 - #if self.__id_current_event >= self.n_events: - # # all events processed, but iterator should stop before anyways. - # break - if self.__id_current_event % 1000 == 0: - progress = 1. * self.__id_current_event / self.n_events - eta = 0 - if self.__id_current_event > 0: - eta = (time.time() - self.__t) / self.__id_current_event * (self.n_events - self.__id_current_event) / 60. 
- self.logger.warning("reading in event {}/{} ({:.0f}%) ETA: {:.1f} minutes".format(self.__id_current_event, self.n_events, 100 * progress, eta)) - - run_number = event["run_number"] - evt_number = event["event_number"] - station_id = event_header["station_number"] - self.logger.info("Reading Run: {run_number}, Event {evt_number}, Station {station_id}") - - evt = NuRadioReco.framework.event.Event(run_number, evt_number) - station = NuRadioReco.framework.station.Station(station_id) - #TODO in future: do need to apply calibrations? - - unix_time = event_header["trigger_time"] - event_time = astropy.time.Time(unix_time, format='unix') - - station.set_station_time(event_time) - for trigger_key in root_trigger_keys: - try: - has_triggered = bool(event_header[trigger_key]) - trigger = NuRadioReco.framework.trigger.Trigger(trigger_key.split('.')[-1]) - trigger.set_triggered(has_triggered) - station.set_trigger(trigger) - except ValueError: - pass - - radiant_data = event["radiant_data[24][2048]"] # returns array of n_channels, n_points - # Loop over all requested channels in data - for chan in channels: - channel = NuRadioReco.framework.channel.Channel(chan) - - # Get data from array via graph method - voltage = np.array(radiant_data[chan]) * units.mV - #times = np.arange(len(voltage)) * sampling - - if voltage.shape[0] % 2 != 0: - voltage = voltage[:-1] - - #TODO: need to subtract mean... probably not if running signal reconstructor? - #channel.set_trace(voltage-np.mean(voltage), sampling_rate) - channel.set_trace(voltage, self.__sampling_rate) - station.add_channel(channel) - evt.set_station(station) - # we want to have access to basic signal quantities with implementation from NuRadioReco - #TODO: maybe this should be run in external module? - signal_reconstructor.run(evt, station, None) - yield evt - - def get_events(self): - return self.run() - - def get_n_events(self): - return self.n_events - - def get_filenames(self): - return self.input_files - - def end(self): - pass diff --git a/NuRadioReco/modules/io/rno_g/rnogDataReader.py b/NuRadioReco/modules/io/rno_g/rnogDataReader.py deleted file mode 100644 index cea0be080..000000000 --- a/NuRadioReco/modules/io/rno_g/rnogDataReader.py +++ /dev/null @@ -1,126 +0,0 @@ -import numpy as np -import uproot -import NuRadioReco.framework.event -import NuRadioReco.framework.station -import NuRadioReco.framework.channel -from NuRadioReco.utilities import units -import astropy.time -import glob -import logging -import time -from functools import lru_cache -import six -import NuRadioReco.utilities.metaclasses - -logger = logging.getLogger("RNO-G_IO") -# logger.setLevel(logging.DEBUG) - -# @six.add_metaclass(NuRadioReco.utilities.metaclasses.Singleton) # maybe? -class RNOGDataReader: - - def __init__(self, filenames, *args, **kwargs): - logger.debug("Initializing RNOGDataReader") - self.__filenames = filenames - self.__event_ids = None - self.__sampling_rate = 3.2 * units.GHz #TODO: 3.2 at the beginning of deployment. Will change to 2.4 GHz after firmware update eventually, but info not yet contained in the .root files. Read out once available. 
- self.__parse_event_ids() - self.__i_events_per_file = np.zeros((len(self.__filenames), 2), dtype=int) - i_event = 0 - for i_file, filename in enumerate(filenames): - file = self.__open_file(filename) - events_in_file = file['waveforms'].num_entries - self.__i_events_per_file[i_file] = [i_event, i_event + events_in_file] - i_event += events_in_file - - self._root_trigger_keys = [ - 'trigger_info.rf_trigger', 'trigger_info.force_trigger', - 'trigger_info.pps_trigger', 'trigger_info.ext_trigger', - 'trigger_info.radiant_trigger', 'trigger_info.lt_trigger', - 'trigger_info.surface_trigger' - ] - - def get_filenames(self): - return self.__filenames - - def get_event_ids(self): - if self.__event_ids is None: - return self.__parse_event_ids() - return self.__event_ids - - def __parse_event_ids(self): - logger.debug('Parsing event ids') - event_ids = np.array([], dtype=int) - run_numbers = np.array([], dtype=int) - for filename in self.__filenames: - file = self.__open_file(filename) - event_ids = np.append(event_ids, file['waveforms']['event_number'].array(library='np').astype(int)) - run_numbers = np.append(run_numbers, file['header']['run_number'].array(library='np').astype(int)) - self.__event_ids = np.array([run_numbers, event_ids]).T - - def __open_file(self, filename): - logger.debug("Opening file {}".format(filename)) - file = uproot.open(filename) - if 'combined' in file: - file = file['combined'] - return file - - def get_n_events(self): - return self.get_event_ids().shape[0] - - # @lru_cache(maxsize=1) # probably not actually relevant outside the data viewer? - def get_event_i(self, i_event): - read_time = time.time() - event = NuRadioReco.framework.event.Event(*self.get_event_ids()[i_event]) - for i_file, filename in enumerate(self.__filenames): - if self.__i_events_per_file[i_file, 0] <= i_event < self.__i_events_per_file[i_file, 1]: - i_event_in_file = i_event - self.__i_events_per_file[i_file, 0] - file = self.__open_file(self.__filenames[i_file]) - station = NuRadioReco.framework.station.Station((file['waveforms']['station_number'].array(library='np', entry_start=i_event_in_file, entry_stop=(i_event_in_file+1))[0])) - # station not set properly in first runs, try from header - if station.get_id() == 0 and 'header' in file: - station = NuRadioReco.framework.station.Station((file['header']['station_number'].array(library='np', entry_start=i_event_in_file, entry_stop=(i_event_in_file+1))[0])) - station.set_is_neutrino() - - if 'header' in file: - unix_time = file['header']['readout_time'].array(library='np', entry_start=i_event_in_file, entry_stop=(i_event_in_file+1))[0] - event_time = astropy.time.Time(unix_time, format='unix') - station.set_station_time(event_time) - ### read in basic trigger data - for trigger_key in self._root_trigger_keys: - try: - has_triggered = bool(file['header'][trigger_key].array(library='np', entry_start=i_event_in_file, entry_stop=(i_event_in_file+1))[0]) - trigger = NuRadioReco.framework.trigger.Trigger(trigger_key.split('.')[-1]) - trigger.set_triggered(has_triggered) - # trigger.set_trigger_time(file['header']['trigger_time']) - station.set_trigger(trigger) - except uproot.exceptions.KeyInFileError: - pass - - waveforms = file['waveforms']['radiant_data[24][2048]'].array(library='np', entry_start=i_event_in_file, entry_stop=(i_event_in_file+1)) - for i_channel in range(waveforms.shape[1]): - channel = NuRadioReco.framework.channel.Channel(i_channel) - channel.set_trace(waveforms[0, i_channel]*units.mV, self.__sampling_rate) - 
station.add_channel(channel) - event.set_station(station) - logger.debug("Spent {:.0f} ms reading event {}".format((time.time()-read_time) * 1e3, i_event)) - return event - return None - - def get_event(self, event_id): - find_event = np.where((self.get_event_ids()[:,0] == event_id[0]) & (self.get_event_ids()[:,1] == event_id[1]))[0] - if len(find_event) == 0: - return None - elif len(find_event) == 1: - return self.get_event_i(find_event[0]) - else: - raise RuntimeError('There are multiple events with the ID [{}, {}] in the file'.format(event_id[0], event_id[1])) - - def get_events(self): - for ev_i in range(self.get_n_events()): - yield self.get_event_i(ev_i) - - def get_detector(self): - return None - - def get_header(self): - return None From b097ff14c0b13ea42417d4d2d67aa2a1a53e038a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 24 Apr 2023 13:53:09 +0200 Subject: [PATCH 046/102] Refactor/cosmetics base_trace.py --- NuRadioReco/framework/base_trace.py | 29 ++++++++++++++++++++--------- 1 file changed, 20 insertions(+), 9 deletions(-) diff --git a/NuRadioReco/framework/base_trace.py b/NuRadioReco/framework/base_trace.py index 0b735967f..c79c86c29 100644 --- a/NuRadioReco/framework/base_trace.py +++ b/NuRadioReco/framework/base_trace.py @@ -36,7 +36,7 @@ def get_trace(self): trace: np.array of floats the time trace """ - if(not self.__time_domain_up_to_date): + if not self.__time_domain_up_to_date: self._time_trace = fft.freq2time(self._frequency_spectrum, self._sampling_rate) self.__time_domain_up_to_date = True self._frequency_spectrum = None @@ -62,10 +62,10 @@ def get_filtered_trace(self, passband, filter_type='butter', order=10, rp=None): return fft.freq2time(spec, self.get_sampling_rate()) def get_frequency_spectrum(self): - if(self.__time_domain_up_to_date): + if self.__time_domain_up_to_date: self._frequency_spectrum = fft.time2freq(self._time_trace, self._sampling_rate) self._time_trace = None -# logger.debug("frequency spectrum has shape {}".format(self._frequency_spectrum.shape)) + # logger.debug("frequency spectrum has shape {}".format(self._frequency_spectrum.shape)) self.__time_domain_up_to_date = False return np.copy(self._frequency_spectrum) @@ -82,7 +82,8 @@ def set_trace(self, trace, sampling_rate): """ if trace is not None: if trace.shape[trace.ndim - 1] % 2 != 0: - raise ValueError('Attempted to set trace with an uneven number ({}) of samples. Only traces with an even number of samples are allowed.'.format(trace.shape[trace.ndim - 1])) + raise ValueError(('Attempted to set trace with an uneven number ({}) of samples. ' + 'Only traces with an even number of samples are allowed.').format(trace.shape[trace.ndim - 1])) self.__time_domain_up_to_date = True self._time_trace = np.copy(trace) self._sampling_rate = sampling_rate @@ -109,9 +110,10 @@ def get_times(self): try: length = self.get_number_of_samples() times = np.arange(0, length / self._sampling_rate - 0.1 / self._sampling_rate, 1. / self._sampling_rate) + self._trace_start_time - if(len(times) != length): - logger.error("time array does not have the same length as the trace. n_samples = {:d}, sampling rate = {:.5g}".format(length, self._sampling_rate)) - raise ValueError("time array does not have the same length as the trace") + if len(times) != length: + err = f"time array does not have the same length as the trace. 
n_samples = {length:d}, sampling rate = {self._sampling_rate:.5g}" + logger.error(err) + raise ValueError(err) except: times = np.array([]) return times @@ -148,7 +150,7 @@ def get_number_of_samples(self): n_samples: int number of samples in time domain """ - if(self.__time_domain_up_to_date): + if self.__time_domain_up_to_date: length = self._time_trace.shape[-1] # returns the correct length independent of the dimension of the array (channels are 1dim, efields are 3dim) else: length = (self._frequency_spectrum.shape[-1] - 1) * 2 @@ -184,12 +186,14 @@ def resample(self, sampling_rate): if resampling_factor.numerator != 1: # resample and use axis -1 since trace might be either shape (N) for analytic trace or shape (3,N) for E-field resampled_trace = scipy.signal.resample(resampled_trace, resampling_factor.numerator * self.get_number_of_samples(), axis=-1) + if resampling_factor.denominator != 1: # resample and use axis -1 since trace might be either shape (N) for analytic trace or shape (3,N) for E-field resampled_trace = scipy.signal.resample(resampled_trace, np.shape(resampled_trace)[-1] // resampling_factor.denominator, axis=-1) if resampled_trace.shape[-1] % 2 != 0: resampled_trace = resampled_trace.T[:-1].T + self.set_trace(resampled_trace, sampling_rate) def serialize(self): @@ -201,7 +205,7 @@ def serialize(self): def deserialize(self, data_pkl): data = pickle.loads(data_pkl) self.set_trace(data['time_trace'], data['sampling_rate']) - if('trace_start_time' in data.keys()): + if 'trace_start_time' in data.keys(): self.set_trace_start_time(data['trace_start_time']) def __add__(self, x): @@ -214,10 +218,13 @@ def __add__(self, x): # Some sanity checks if not isinstance(x, BaseTrace): raise TypeError('+ operator is only defined for 2 BaseTrace objects') + if self.get_trace() is None or x.get_trace() is None: raise ValueError('One of the trace objects has no trace set') + if self.get_trace().ndim != x.get_trace().ndim: raise ValueError('Traces have different dimensions') + if self.get_sampling_rate() != x.get_sampling_rate(): # Upsample trace with lower sampling rate # Create new baseTrace object for the resampling so we don't change the originals @@ -249,10 +256,12 @@ def __add__(self, x): first_trace = trace_2 second_trace = trace_1 trace_start = x.get_trace_start_time() + # Calculate the difference in the trace start time between the traces and the number of # samples that time difference corresponds to time_offset = np.abs(x.get_trace_start_time() - self.get_trace_start_time()) i_start = int(round(time_offset * sampling_rate)) + # We have to distinguish 2 cases: Trace is 1D (channel) or 2D(E-field) # and treat them differently if trace_1.ndim == 1: @@ -273,11 +282,13 @@ def __add__(self, x): early_trace[:, :first_trace.shape[1]] = first_trace late_trace = np.zeros((second_trace.shape[0], trace_length)) late_trace[:, :second_trace.shape[1]] = second_trace + # Correct for different trace start times by using fourier shift theorem to # shift the later trace backwards. 
late_trace_object = BaseTrace() late_trace_object.set_trace(late_trace, sampling_rate) late_trace_object.apply_time_shift(time_offset, True) + # Create new BaseTrace object holding the summed traces new_trace = BaseTrace() new_trace.set_trace(early_trace + late_trace_object.get_trace(), sampling_rate) From 7fbdefbf28f4205a7936faeb2eceec0429196211 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 24 Apr 2023 14:28:29 +0200 Subject: [PATCH 047/102] Change default for select_runs, improve doc string --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 043051ba2..97963de61 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -112,7 +112,7 @@ def begin(self, data_dirs, read_calibrated_data=False, select_triggers=None, - select_runs=True, + select_runs=False, apply_baseline_correction=True, convert_to_voltage=True, selectors=None, @@ -132,7 +132,7 @@ def begin(self, read_calibrated_data: bool If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. - (temp. Default: False) + (temp. Default: False, this can/should be switched once the calibration in incorp. into Mattak) select_triggers: str or list(str) Names of triggers which should be selected. Convinence interface instead of passing a selector @@ -141,7 +141,7 @@ def begin(self, select_runs: bool If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). If the run_table is not available no selection is performed (and the programm is not interrupted, - only an error message is raised). See parameters to configure run selection. (Default: True) + only an error message is raised). See parameters to configure run selection. 
(Default: False) Other Parameters ---------------- From 9d41271ca9e38f8f25431e664ce6eeaab80e7317 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Mon, 24 Apr 2023 14:38:46 +0200 Subject: [PATCH 048/102] Fix doc-string, implement random generator --- .../RNO_data/read_data_example/read_rnog.py | 2 +- .../measured_noise/RNO_G/noiseImporter.py | 17 ++++++++++------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py index a88ac77b9..950e8c3cb 100644 --- a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py +++ b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py @@ -1,4 +1,4 @@ -from NuRadioReco.modules.io.rno_g import readRNOGDataMattak +from NuRadioReco.modules.io.RNO_G import readRNOGDataMattak from NuRadioReco.modules.io import eventWriter from NuRadioReco.utilities import units diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 16fa0cfe5..738fd4b59 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -1,7 +1,6 @@ import numpy as np import glob import os -import random import collections from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import readRNOGData @@ -21,7 +20,7 @@ class noiseImporter: def begin(self, noise_folders, file_pattern="*", match_station_id=False, station_ids=None, channel_mapping=None, scramble_noise_file_order=True, - log_level=logging.INFO, reader_kwargs={}): + log_level=logging.INFO, random_seed=None, reader_kwargs={}): """ Parameters @@ -29,8 +28,8 @@ def begin(self, noise_folders, file_pattern="*", noise_folders: str or list(str) Folder(s) containing noise file(s). Search in any subfolder as well. - file_patters: str - File patters used to search for directories, (Default: "*", other examples might be "combined") + file_pattern: str + File pattern used to search for directories, (Default: "*", other examples might be "combined") match_station_id: bool If True, add only noise from stations with the same id. (Default: False) @@ -48,7 +47,10 @@ def begin(self, noise_folders, file_pattern="*", If True, randomize the order of noise files before reading them. (Default: True) log_level: loggging log level - the log level, default logging.INFO + The log level to controll verbosity. (Default: logging.INFO) + + random_seed: int + Seed for the random number generator. (Default: None, no fixed seed). reader_kwargs: dict Optional arguements passed to readRNOGDataMattak @@ -56,6 +58,7 @@ def begin(self, noise_folders, file_pattern="*", self.logger = logging.getLogger('NuRadioReco.RNOG.noiseImporter') self.logger.setLevel(log_level) + self.__random_gen = np.random.Generator(np.random.Philox(random_seed)) self._match_station_id = match_station_id self.__station_ids = station_ids @@ -81,7 +84,7 @@ def begin(self, noise_folders, file_pattern="*", raise ValueError if scramble_noise_file_order: - random.shuffle(self.__noise_folders) + self.__random_gen.shuffle(self.__noise_folders) if "log_level" in reader_kwargs: log_level_reader = reader_kwargs.pop("log_level") @@ -126,7 +129,7 @@ def run(self, evt, station, det): station_mask = np.full_like(self.__event_index_list, True) # int(..) 
necessary because pyroot can not handle np.int64
-        i_noise = int(np.random.choice(self.__event_index_list[station_mask]))
+        i_noise = int(self.__random_gen.choice(self.__event_index_list[station_mask]))
         self._n_use_event[i_noise] += 1
         noise_event = self._noise_reader.get_event_by_index(i_noise)

From ebdcab3535a2e92b92dcfce5d0ed4034247a58b7 Mon Sep 17 00:00:00 2001
From: Sjoerd Bouma
Date: Tue, 25 Apr 2023 16:30:06 +0200
Subject: [PATCH 049/102] added mattak, run table and pandas as optional RNO-G
 dependencies

---
 pyproject.toml | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 577a24204..7fb653fb4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -41,7 +41,6 @@ toml = ">=0.10.2"
 uproot = "4.1.1"
 importlib-metadata = {version = ">=4.8.1", python = "<3.8"}
 numba = "*"
-pandas = "*"

 [tool.poetry.dev-dependencies]
 Sphinx = "*"
@@ -51,9 +50,13 @@ proposal = "7.5.1"
 pygdsm = {git = "https://github.com/telegraphic/pygdsm"}
 nifty5 = {git = "https://gitlab.mpcdf.mpg.de/ift/nifty.git", branch="NIFTy_5"}
 pypocketfft = {git = "https://gitlab.mpcdf.mpg.de/mtr/pypocketfft"}
+pandas = "*"
+mattak = {git = "https://github.com/RNO-G/mattak"}
+runtable = {git = "ssh://git@github.com/RNO-G/rnog-runtable.git"}

 [tool.poetry.extras]
 documentation = ["Sphinx", "sphinx-rtd-theme", "numpydoc"]
 proposal = ["proposal"]
 galacticnoise = ['pygdsm']
 ift_reco = ['nifty5', 'pypocketfft']
+RNO_G_DATA = ["mattak", "runtable", "pandas"]

From 22755e68368006ece06584d173c20352e1fd6cab Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Felix=20Schl=C3=BCter?=
Date: Tue, 25 Apr 2023 18:49:54 +0200
Subject: [PATCH 050/102] Set meaningful default arguments for MattakReader in
 NoiseImporter after the default arguments for the MattakReader changed

---
 .../modules/io/RNO_G/readRNOGDataMattak.py    |  6 ++---
 .../measured_noise/RNO_G/noiseImporter.py     | 24 +++++++++----------
 2 files changed, 13 insertions(+), 17 deletions(-)

diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
index 97963de61..ac5da9bdb 100644
--- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
+++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
@@ -280,11 +280,9 @@ def begin(self,
         self._n_events_total = np.sum(self.__n_events_per_dataset)
         self._time_begin = time.time() - t0

+        self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.")
         self.logger.info(f"{self._n_events_total} events in {len(self._datasets)} runs/datasets have been found.")
-
-        # Variable not yet implemented in mattak
-        # self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.")
-
+
         if not self._n_events_total:
             err = "No runs have been selected. Abort ..."
self.logger.error(err) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 738fd4b59..305bd7fee 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -85,22 +85,20 @@ def begin(self, noise_folders, file_pattern="*", if scramble_noise_file_order: self.__random_gen.shuffle(self.__noise_folders) - - if "log_level" in reader_kwargs: - log_level_reader = reader_kwargs.pop("log_level") - else: - log_level_reader = log_level - - self._noise_reader = readRNOGData() - selectors = [lambda einfo: einfo.triggerType == "FORCE"] - self._noise_reader.begin(self.__noise_folders, selectors=selectors, - log_level=log_level_reader, - **reader_kwargs) + self._noise_reader = readRNOGData() + + default_reader_kwargs = { + "selectors": [lambda einfo: einfo.triggerType == "FORCE"], + "log_level": log_level, "select_runs": True, "max_trigger_rate": 2 * units.Hz, + "run_types": ["physics"] + } + default_reader_kwargs.update(reader_kwargs) + + self._noise_reader.begin(self.__noise_folders, **default_reader_kwargs) - self.logger.info("Get event informations ...") # instead of reading all noise events into memory we only get certain information here and read all data in run() - + self.logger.info("Get event informations ...") noise_information = self._noise_reader.get_events_information(keys=["station"]) self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) From c5fdc1da723ae2e21fc3d63e2942707ce420f6d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 18:56:30 +0200 Subject: [PATCH 051/102] Extend interface of get_event to take the run number as well --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index ac5da9bdb..6f4dbfdf2 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -583,11 +583,14 @@ def get_event_by_index(self, event_index): return evt - def get_event(self, event_id): + def get_event(self, run_nr, event_id): """ Allows to read a specific event identifed by its id Parameters ---------- + + run_nr: int + Run number event_id: int Event Id @@ -601,9 +604,9 @@ def get_event(self, event_id): self.logger.debug(f"Processing event {event_id}") t0 = time.time() - event_infos = self.get_events_information(keys=["eventNumber"]) - event_idx_ids = np.array([[index, ele["eventNumber"]] for index, ele in event_infos.items()]) - mask = event_idx_ids[:, 1] == event_id + event_infos = self.get_events_information(keys=["eventNumber", "run"]) + event_idx_ids = np.array([[index, ele["eventNumber"], ele["run"]] for index, ele in event_infos.items()]) + mask = np.all([event_idx_ids[:, 1] == event_id, event_idx_ids[:, 2] == run_nr], axis=0) if not np.any(mask): self.logger.info(f"Could not find event with id: {event_id}.") From bde35fa7a774709b520f096b6e5f290b85ae16a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 18:57:02 +0200 Subject: [PATCH 052/102] Fix small bug in begin() --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
index 6f4dbfdf2..2035f790b 100644
--- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
+++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
@@ -235,8 +235,6 @@ def begin(self,
                 selectors = [selectors]

             self.logger.info(f"Found {len(selectors)} selector(s)")
-
-            self._selectors = selectors

         if select_triggers is not None:
             if isinstance(select_triggers, str):
@@ -244,6 +242,8 @@ def begin(self,
             else:
                 for select_trigger in select_triggers:
                     selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger)
+
+        self._selectors = selectors

         self._time_begin = 0
         self._time_run = 0

From 9d19a81be7330f586131c3325db4b8ba0cd65b5a Mon Sep 17 00:00:00 2001
From: Christian Glaser
Date: Tue, 2 May 2023 11:38:49 +0200
Subject: [PATCH 053/102] Update README.md

---
 README.md | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/README.md b/README.md
index 799965a22..98bd87634 100644
--- a/README.md
+++ b/README.md
@@ -32,14 +32,16 @@ NuRadioMC is continuously improved and new features are being added. The followi
 If you would like to contribute, please contact @cg-laser or @anelles for permissions to work on NuRadioMC. We work with pull requests only that can be merged after review. Also please visit https://nu-radio.github.io/NuRadioMC/Introduction/pages/contributing.html for details on our workflow and coding conventions.

+
+## Publications building on NuRadioMC/Reco
 NuRadioMC is used in an increasing number of studies. To get an overview for what NuRadioMC can be used for, please have a look at the following publications or see [here](https://inspirehep.net/literature?sort=mostrecent&size=25&page=1&q=refersto%3Arecid%3A1738571%20or%20refersto%3Arecid%3A1725583):

-* V. B. Valera, M. Bustamante and C. Glaser, “Near-future discovery of the diffuse flux of ultra-high-energy cosmic neutrinos”, [arXiv:2210.03756](https://arxiv.org/abs/2210.03756)
-* Alfonso Garcia Soto, Diksha Garg, Mary Hall Reno, Carlos A. Argüelles, "Probing Quantum Gravity with Elastic Interactions of Ultra-High-Energy Neutrinos", [arXiv:2209.06282](https://arxiv.org/abs/2209.06282)
-* Damiano F. G. Fiorillo, Mauricio Bustamante, Victor B. Valera, "Near-future discovery of point sources of ultra-high-energy neutrinos", [arXiv:2205.15985](https://arxiv.org/abs/2205.15985)
+* V. B. Valera, M. Bustamante and C. Glaser, “Near-future discovery of the diffuse flux of ultra-high-energy cosmic neutrinos”, Phys. Rev. D 107, 043019 [arXiv:2210.03756](https://arxiv.org/abs/2210.03756)
+* Alfonso Garcia Soto, Diksha Garg, Mary Hall Reno, Carlos A. Argüelles, "Probing Quantum Gravity with Elastic Interactions of Ultra-High-Energy Neutrinos", Phys. Rev. D 107, 033009 (2023) [arXiv:2209.06282](https://arxiv.org/abs/2209.06282)
+* Damiano F. G. Fiorillo, Mauricio Bustamante, Victor B. Valera, "Near-future discovery of point sources of ultra-high-energy neutrinos", JCAP 03 (2023) 026 [arXiv:2205.15985](https://arxiv.org/abs/2205.15985)
 * C. Glaser, S. McAleer, S. Stjärnholm, P. Baldi, S. W. Barwick, “Deep learning reconstruction of the neutrino direction and energy from in-ice radio detector data”, Astroparticle Physics 145, (2023) 102781, [doi:10.1016/j.astropartphys.2022.102781](https://doi.org/10.1016/j.astropartphys.2022.102781), [arXiv:2205.15872](https://arxiv.org/abs/2205.15872)
-* J. Beise and C.
Glaser, “In-situ calibration system for the measurement of the snow accumulation and the index-of-refraction profile for radio neutrino detectors”, [arXiv:2205.00726](https://arxiv.org/abs/2205.00726)
+* J. Beise and C. Glaser, “In-situ calibration system for the measurement of the snow accumulation and the index-of-refraction profile for radio neutrino detectors”, Journal of Instrumentation 18 P01036 (2023), [arXiv:2205.00726](https://arxiv.org/abs/2205.00726)
 * V. B. Valera, M. Bustamante and C. Glaser, “The ultra-high-energy neutrino-nucleon cross section: measurement forecasts for an era of cosmic EeV-neutrino discovery”, Journal of High Energy Physics 06 (2022) 105, [doi:10.1007/JHEP06(2022)105](https://doi.org/10.1007/JHEP06(2022%29105), [arXiv:2204.04237](https://arxiv.org/abs/2204.04237)
 * ARIANNA collaboration (A. Anker et al.), “Measuring the Polarization Reconstruction Resolution of the ARIANNA Neutrino Detector with Cosmic Rays”, Journal of Cosmology and Astroparticle Physics 04(2022)022, [doi:10.1088/1475-7516/2022/04/022](https://doi.org/10.1088/1475-7516/2022/04/022), [arXiv:2112.01501](https://arxiv.org/abs/2112.01501)
 * ARIANNA collaboration (A. Anker et al.), “Improving sensitivity of the ARIANNA detector by rejecting thermal noise with deep learning”, Journal of Instrumentation 17 P03007 (2022), [doi:10.1088/1748-0221/17/03/P03007](https://doi.org/10.1088/1748-0221/17/03/P03007), [arXiv:2112.01031](https://arxiv.org/abs/2112.01031)

From 7c82ea65717281101580d26f2f1d1cfc556b15cd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Felix=20Schl=C3=BCter?=
Date: Wed, 26 Apr 2023 11:25:43 +0200
Subject: [PATCH 054/102] Add several sanity checks which otherwise would lead
 to terminations. Check if run is present in run table, check if all mattak
 files are in directory (improves confidence that mattak files are not
 corrupt), check that event trigger time is not inf. Simplify/restructure
 import of runtable

---
 .../modules/io/RNO_G/readRNOGDataMattak.py | 86 +++++++++++++++----
 1 file changed, 67 insertions(+), 19 deletions(-)

diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
index 2035f790b..759630bde 100644
--- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
+++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py
@@ -1,9 +1,9 @@
 import numpy as np
-import pandas
 import logging
 import os
 import time
 import astropy.time
+import math

 from NuRadioReco.modules.base.module import register_run

@@ -15,14 +15,6 @@
 from NuRadioReco.utilities import units

 import mattak.Dataset

-try:
-    from rnog_data.runtable import RunTable
-    imported_runtable = True
-except ImportError:
-    print("Import of run table failed. You will not be able to select runs! \n"
-          "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git")
-    imported_runtable = False
-

 def baseline_correction(wfs, n_bins=128, func=np.median):
     """
@@ -104,6 +96,38 @@ def get_time_offset(trigger_type):
         return time_offsets[trigger_type]
     else:
         raise KeyError(f"Unknown trigger type: {trigger_type}. Known are: FORCE, LT, RADIANT. Abort ....")
+
+
+def all_files_in_directory(mattak_dir):
+    """
+    Checks if all Mattak root files are in a directory.
+    Ignoring runinfo.root because (as far as I know) not all runs have those and the information is currently not read by Mattak.
+    There are mattak directories which produce a ReferenceError when reading. They have a "combined.root" which is
+    apparently empty but are missing the daqstatus, pedestal, and header file.
+ + Parameters + ---------- + + mattak_dir: str + Path to a mattak directory + + Returns + ------- + + all_there: bool + True, if all "req_files" are there and waveforms.root or combined.root. Otherwise returns False. + """ + # one or the other has to be present + if not os.path.exists(os.path.join(mattak_dir, "waveforms.root")) and \ + not os.path.exists(os.path.join(mattak_dir, "combined.root")): + return False + + req_files = ["daqstatus.root", "headers.root", "pedestal.root"] + for file in req_files: + if not os.path.exists(os.path.join(mattak_dir, file)): + return False + + return True class readRNOGData: @@ -207,22 +231,25 @@ def begin(self, self._time_low = None self._time_high = None + self.__run_table = None if select_runs: if run_table_path is None: - global imported_runtable - if imported_runtable: + try: + from rnog_data.runtable import RunTable self.logger.debug("Access RunTable database ...") try: self.__run_table = RunTable().get_table() except: self.logger.error("No connect to RunTable database could be established. " "Runs will not be filtered.") - imported_runtable = False + except ImportError: + self.logger.error("Import of run table failed. You will not be able to select runs! \n" + "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") else: + import pandas self.__run_table = pandas.read_csv(run_table_path) - imported_runtable = True - if select_runs: + if select_runs and self.__run_table is not None: self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz" f"\n\tSelect runs which are between {self._time_low} - {self._time_high}") @@ -263,17 +290,27 @@ def begin(self, if not os.path.exists(data_dir): self.logger.error(f"The directory {data_dir} does not exist") + continue + + if not all_files_in_directory(data_dir): + self.logger.error(f"Incomplete directory: {data_dir}. Skip ...") + continue dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend) # filter runs/datasets based on - if select_runs and imported_runtable and not self.__select_run(dataset): + if select_runs and self.__run_table is not None and not self.__select_run(dataset): self.__skipped_runs += 1 continue self.__n_runs += 1 self._datasets.append(dataset) self.__n_events_per_dataset.append(dataset.N()) + + if not len(self._datasets): + err = "Found no valid datasets. Stop!" + self.logger.error(err) + raise FileNotFoundError(err) # keeps track which event index is in which dataset self._event_idxs_datasets = np.cumsum(self.__n_events_per_dataset) @@ -310,6 +347,10 @@ def __select_run(self, dataset): run_info = self.__run_table.query(f"station == {station_id:d} & run == {run_id:d}") + if not len(run_info): + self.logger.error(f"Run {run_id:d} (station {station_id:d}) not in run table. Reject...") + return False + # "time_start/end" is stored in the isot format. datetime is much faster than astropy (~85ns vs 55 mus). # But using datetime would mean to stip decimals because datetime can only handle mu sec precision and can not cope # with the additional decimals for ns. @@ -425,7 +466,6 @@ def get_events_information(self, keys=["station", "run", "eventNumber"]): if not do_read: # ... 
or when it does not have the desired information first_event_info = next(iter(self._events_information)) - print(first_event_info) for key in keys: if key not in list(first_event_info.keys()): do_read = True @@ -468,14 +508,20 @@ def _get_event(self, event_info, waveforms): evt: NuRadioReco.framework.event """ - + + trigger_time = event_info.triggerTime + if math.isinf(trigger_time): + self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " + "has inf trigger time. Skip event...") + return None + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) - station.set_station_time(astropy.time.Time(event_info.triggerTime, format='unix')) + station.set_station_time(astropy.time.Time(trigger_time, format='unix')) trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) trigger.set_triggered() - trigger.set_trigger_time(event_info.triggerTime) + trigger.set_trigger_time(trigger_time) station.set_trigger(trigger) for channel_id, wf in enumerate(waveforms): @@ -539,6 +585,8 @@ def run(self): waveforms_of_event = wfs[idx] evt = self._get_event(evtinfo, waveforms_of_event) + if evt is None: + continue self._time_run += time.time() - t0 self.__counter += 1 From cab2c559321f26ab5070945a63df2fc9c2bdd97f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 11:35:12 +0200 Subject: [PATCH 055/102] Add more logger info to noiseImporter --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 2 +- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 759630bde..beabba213 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -512,7 +512,7 @@ def _get_event(self, event_info, waveforms): trigger_time = event_info.triggerTime if math.isinf(trigger_time): self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " - "has inf trigger time. Skip event...") + "has inf trigger time. Skip event...") return None evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 305bd7fee..85b011e01 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -2,6 +2,7 @@ import glob import os import collections +import time from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import readRNOGData from NuRadioReco.modules.base.module import register_run @@ -98,8 +99,11 @@ def begin(self, noise_folders, file_pattern="*", self._noise_reader.begin(self.__noise_folders, **default_reader_kwargs) # instead of reading all noise events into memory we only get certain information here and read all data in run() - self.logger.info("Get event informations ...") + self.logger.info("Get event informations ...") + t0 = time.time() noise_information = self._noise_reader.get_events_information(keys=["station"]) + self.logger.info(f"... 
in {t0 - time.time():.2f}s") + self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) From 413776cb713dbc67b0b3eaae6249c1c7395f569b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 12:08:40 +0200 Subject: [PATCH 056/102] With changes to the mattak reader, get_event_by_index can return none. Catch that in the noiseImporter --- .../measured_noise/RNO_G/noiseImporter.py | 45 ++++++++++++++++--- 1 file changed, 40 insertions(+), 5 deletions(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 85b011e01..287ca1ae8 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -102,7 +102,7 @@ def begin(self, noise_folders, file_pattern="*", self.logger.info("Get event informations ...") t0 = time.time() noise_information = self._noise_reader.get_events_information(keys=["station"]) - self.logger.info(f"... in {t0 - time.time():.2f}s") + self.logger.info(f"... in {time.time() - t0:.2f}s") self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) @@ -116,6 +116,44 @@ def __get_noise_channel(self, channel_id): else: return self.__channel_mapping[channel_id] + + def __draw_noise_event(self, mask): + """ + reader.get_event_by_index can return None when, e.g., the trigger time is inf or the sampling rate 0. + Hence, try again if that happens (should only occur rearly). + + Parameters + ---------- + + mask: np.array(bool) + Mask of which noise events are allowed (e.g. because of matching station ids, ...) + + Returns + ------- + + noise_event: NuRadioReco.framework.event + A event containing noise traces + + i_noise: int + The index of the drawn event + """ + tries = 0 + while tries < 100: + # int(..) necessary because pyroot can not handle np.int64 + i_noise = int(self.__random_gen.choice(self.__event_index_list[mask])) + noise_event = self._noise_reader.get_event_by_index(i_noise) + tries += 1 + if noise_event is not None: + break + + if noise_event is None: + err = "Could not draw a random station which is not None after 100 tries. Stop." + self.logger.error(err) + raise ValueError(err) + + self._n_use_event[i_noise] += 1 + return noise_event, i_noise + @register_run() def run(self, evt, station, det): @@ -130,10 +168,7 @@ def run(self, evt, station, det): # select all noise events station_mask = np.full_like(self.__event_index_list, True) - # int(..) 
necessary because pyroot can not handle np.int64 - i_noise = int(self.__random_gen.choice(self.__event_index_list[station_mask])) - self._n_use_event[i_noise] += 1 - noise_event = self._noise_reader.get_event_by_index(i_noise) + noise_event, i_noise = self.__draw_noise_event(station_mask) station_id = noise_event.get_station_ids()[0] noise_station = noise_event.get_station(station_id) From ca85a56ec1a4f8ba5140c4c0807afe851efe6c97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 14:15:36 +0200 Subject: [PATCH 057/102] Change tab spacing from 3 to 4 spaces in readRNOGDataMattak.py --- .../modules/io/RNO_G/readRNOGDataMattak.py | 1264 ++++++++--------- 1 file changed, 632 insertions(+), 632 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index beabba213..7f0280bd5 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -17,677 +17,677 @@ def baseline_correction(wfs, n_bins=128, func=np.median): - """ - Simple baseline correction function. Determines baseline in discrete chuncks of "n_bins" with - the function specified (i.e., mean or median). - - Parameters - ---------- - - wfs: np.array(n_events, n_channels, n_samples) - Waveforms of several events/channels. - - n_bins: int - Number of samples/bins in one "chunck". If None, calculate median/mean over entire trace. (Default: 128) - - func: np.mean or np.median - Function to calculate pedestal - - Returns - ------- - - wfs_corrected: np.array(n_events, n_channels, n_samples) - Baseline/pedestal corrected waveforms - """ + """ + Simple baseline correction function. Determines baseline in discrete chuncks of "n_bins" with + the function specified (i.e., mean or median). + + Parameters + ---------- + + wfs: np.array(n_events, n_channels, n_samples) + Waveforms of several events/channels. + + n_bins: int + Number of samples/bins in one "chunck". If None, calculate median/mean over entire trace. 
(Default: 128) + + func: np.mean or np.median + Function to calculate pedestal - # Example: Get baselines in chunks of 128 bins - # wfs in (n_events, n_channels, 2048) - # np.split -> (16, n_events, n_channels, 128) each waveform split in 16 chuncks - # func -> (16, n_events, n_channels) pedestal for each chunck - if n_bins is not None: - baseline_values = func(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) - - # np.repeat -> (2048, n_events, n_channels) concatenate the 16 chuncks to one baseline - baseline_traces = np.repeat(baseline_values, n_bins % 2048, axis=0) - else: - baseline_values = func(wfs, axis=-1) - # np.repeat -> (2048, n_events, n_channels) concatenate the 16 chuncks to one baseline - baseline_traces = np.repeat(baseline_values, 2048, axis=0) - - # np.moveaxis -> (n_events, n_channels, 2048) - baseline_traces = np.moveaxis(baseline_traces, 0, -1) + Returns + ------- - return wfs - baseline_traces + wfs_corrected: np.array(n_events, n_channels, n_samples) + Baseline/pedestal corrected waveforms + """ + + # Example: Get baselines in chunks of 128 bins + # wfs in (n_events, n_channels, 2048) + # np.split -> (16, n_events, n_channels, 128) each waveform split in 16 chuncks + # func -> (16, n_events, n_channels) pedestal for each chunck + if n_bins is not None: + baseline_values = func(np.split(wfs, 2048 // n_bins, axis=-1), axis=-1) + + # np.repeat -> (2048, n_events, n_channels) concatenate the 16 chuncks to one baseline + baseline_traces = np.repeat(baseline_values, n_bins % 2048, axis=0) + else: + baseline_values = func(wfs, axis=-1) + # np.repeat -> (2048, n_events, n_channels) concatenate the 16 chuncks to one baseline + baseline_traces = np.repeat(baseline_values, 2048, axis=0) + + # np.moveaxis -> (n_events, n_channels, 2048) + baseline_traces = np.moveaxis(baseline_traces, 0, -1) + + return wfs - baseline_traces def get_time_offset(trigger_type): - """ - Mapping the offset between trace start time and trigger time (~ signal time). - Temporary use hard-coded values for each trigger type. In the future this - information might be time, station, and channel dependent and should come - from a database (or is already calibrated in mattak) - - Current values motivated by figures posted in PR https://github.com/nu-radio/NuRadioMC/pull/519 - - Parameters - ---------- - - trigger_type: str - Trigger type encoded as string from Mattak - - Returns - ------- - - time_offset: float - trace_start_time = trigger_time - time_offset - - """ - - time_offsets = { - "FORCE": 0, - "LT": 250 * units.ns, - "RADIANT": 475 * units.ns - } - - if trigger_type.startswith("RADIANT"): - trigger_type = "RADIANT" - - if trigger_type in time_offsets: - return time_offsets[trigger_type] - else: - raise KeyError(f"Unknown trigger type: {trigger_type}. Known are: FORCE, LT, RADIANT. Abort ....") - - + """ + Mapping the offset between trace start time and trigger time (~ signal time). + Temporary use hard-coded values for each trigger type. 
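# A minimal, self-contained sketch of the chunked baseline subtraction that
# baseline_correction() above implements; it is not part of the patched module.
# The (n_events, n_channels, 2048) shape and n_bins=128 follow the comments above;
# the random input is a made-up stand-in for real waveforms.
import numpy as np

def chunked_baseline_sketch(wfs, n_bins=128, func=np.median):
    n_chunks = wfs.shape[-1] // n_bins
    # (n_chunks, n_events, n_channels): one pedestal per 128-sample chunk
    baseline_values = func(np.split(wfs, n_chunks, axis=-1), axis=-1)
    # stretch each pedestal back to n_bins samples and restore the sample axis
    baseline_traces = np.moveaxis(np.repeat(baseline_values, n_bins, axis=0), 0, -1)
    return wfs - baseline_traces

wfs = np.random.normal(50, 5, size=(10, 24, 2048))  # hypothetical traces with a 50-count offset
wfs_corrected = chunked_baseline_sketch(wfs)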
In the future this + information might be time, station, and channel dependent and should come + from a database (or is already calibrated in mattak) + + Current values motivated by figures posted in PR https://github.com/nu-radio/NuRadioMC/pull/519 + + Parameters + ---------- + + trigger_type: str + Trigger type encoded as string from Mattak + + Returns + ------- + + time_offset: float + trace_start_time = trigger_time - time_offset + + """ + + time_offsets = { + "FORCE": 0, + "LT": 250 * units.ns, + "RADIANT": 475 * units.ns + } + + if trigger_type.startswith("RADIANT"): + trigger_type = "RADIANT" + + if trigger_type in time_offsets: + return time_offsets[trigger_type] + else: + raise KeyError(f"Unknown trigger type: {trigger_type}. Known are: FORCE, LT, RADIANT. Abort ....") + + def all_files_in_directory(mattak_dir): - """ - Checks if all Mattak root files are in a directory. - Ignoring runinfo.root because (asaik) not all runs have those and information is currently not read by Mattak. - There are mattak directories which produce a ReferenceError when reading. They have a "combined.root" which is - apparently empty but are missing the daqstatus, pedestal, and header file. - - Parameters - ---------- - - mattak_dir: str - Path to a mattak directory - - Returns - ------- - - all_there: bool - True, if all "req_files" are there and waveforms.root or combined.root. Otherwise returns False. - """ - # one or the other has to be present - if not os.path.exists(os.path.join(mattak_dir, "waveforms.root")) and \ - not os.path.exists(os.path.join(mattak_dir, "combined.root")): - return False - - req_files = ["daqstatus.root", "headers.root", "pedestal.root"] - for file in req_files: - if not os.path.exists(os.path.join(mattak_dir, file)): - return False - - return True + """ + Checks if all Mattak root files are in a directory. + Ignoring runinfo.root because (asaik) not all runs have those and information is currently not read by Mattak. + There are mattak directories which produce a ReferenceError when reading. They have a "combined.root" which is + apparently empty but are missing the daqstatus, pedestal, and header file. + + Parameters + ---------- + + mattak_dir: str + Path to a mattak directory + + Returns + ------- + + all_there: bool + True, if all "req_files" are there and waveforms.root or combined.root. Otherwise returns False. + """ + # one or the other has to be present + if not os.path.exists(os.path.join(mattak_dir, "waveforms.root")) and \ + not os.path.exists(os.path.join(mattak_dir, "combined.root")): + return False + + req_files = ["daqstatus.root", "headers.root", "pedestal.root"] + for file in req_files: + if not os.path.exists(os.path.join(mattak_dir, file)): + return False + + return True class readRNOGData: - def begin(self, - data_dirs, - read_calibrated_data=False, - select_triggers=None, - select_runs=False, - apply_baseline_correction=True, - convert_to_voltage=True, - selectors=None, - run_table_path=None, - run_types=["physics"], - run_time_range=None, - max_trigger_rate=0 * units.Hz, - mattak_backend="auto", - log_level=logging.INFO): - """ - - Parameters - ---------- - - data_dirs: list of strings / string - Path to run directories (i.e. ".../stationXX/runXXX/") - - read_calibrated_data: bool - If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. - (temp. Default: False, this can/should be switched once the calibration in incorp. into Mattak) - - select_triggers: str or list(str) - Names of triggers which should be selected. 
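# A small usage sketch for get_time_offset() defined above (not part of the patched
# module): the trigger-type dependent offset fixes where the trace starts relative
# to the trigger time, following trace_start_time = trigger_time - time_offset.
# The trigger type below is just an example value.
from NuRadioReco.utilities import units
from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import get_time_offset

time_offset = get_time_offset("LT")   # 250 ns for the low-threshold trigger
trace_start_time = -time_offset       # relative to the trigger time, as used
                                      # via channel.set_trace_start_time(-time_offset)
print(time_offset / units.ns)         # -> 250.0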
Convinence interface instead of passing a selector - (see "selectors" below. (Default: None) - - select_runs: bool - If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). - If the run_table is not available no selection is performed (and the programm is not interrupted, - only an error message is raised). See parameters to configure run selection. (Default: False) - - Other Parameters - ---------------- - - apply_baseline_correction: bool - Only applies when non-calibrated data are read. If true, correct for DC offset. - (Default: True) - - convert_to_voltage: bool - Only applies when non-calibrated data are read. If true, convert ADC to voltage. - (Default: True) - - selectors: list of lambdas - List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. - Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" - - run_table_path: str - Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) - - run_types: list - Used to select/reject runs from information in the RNO-G RunTable. List of run_types to be used. (Default: ['physics']) - - run_time_range: tuple - Specify a time range to select runs (it is sufficient that runs cover the time range partially). - Each value of the tuple has to be in a format which astropy.time.Time understands. A value can be None - which means that the lower or upper bound is unconstrained. If run_time_range is None no time selection is - applied. (Default: None) - - max_trigger_rate: float - Used to select/reject runs from information in the RNO-G RunTable. Maximum allowed trigger rate (per run) in Hz. - If 0, no cut is applied. (Default: 1 Hz) - - mattak_backend: str - Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise - a "fallback" to uproot is used. (Default: "auto") - - log_level: enum - Set verbosity level of logger - """ - - t0 = time.time() - - self.logger = logging.getLogger('NuRadioReco.readRNOGData') - self.logger.setLevel(log_level) - - self._read_calibrated_data = read_calibrated_data - self._apply_baseline_correction = apply_baseline_correction - self._convert_to_voltage = convert_to_voltage - - # Temporary solution hard-coded values from Cosmin. Only used when uncalibrated data - # is read and convert_to_voltage is True. - self._adc_ref_voltage_range = 2.5 * units.volt - self._adc_n_bits = 12 - - self.__max_trigger_rate = max_trigger_rate - self.__run_types = run_types - - if run_time_range is not None: - convert_time = lambda t: None if t is None else astropy.time.Time(t) - self._time_low = convert_time(run_time_range[0]) - self._time_high = convert_time(run_time_range[1]) - else: - self._time_low = None - self._time_high = None - - self.__run_table = None - if select_runs: - if run_table_path is None: - try: - from rnog_data.runtable import RunTable - self.logger.debug("Access RunTable database ...") - try: - self.__run_table = RunTable().get_table() - except: - self.logger.error("No connect to RunTable database could be established. " - "Runs will not be filtered.") - except ImportError: - self.logger.error("Import of run table failed. You will not be able to select runs! 
\n" - "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") - else: - import pandas - self.__run_table = pandas.read_csv(run_table_path) + def begin(self, + data_dirs, + read_calibrated_data=False, + select_triggers=None, + select_runs=False, + apply_baseline_correction=True, + convert_to_voltage=True, + selectors=None, + run_table_path=None, + run_types=["physics"], + run_time_range=None, + max_trigger_rate=0 * units.Hz, + mattak_backend="auto", + log_level=logging.INFO): + """ + + Parameters + ---------- + + data_dirs: list of strings / string + Path to run directories (i.e. ".../stationXX/runXXX/") - if select_runs and self.__run_table is not None: - self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + - f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz" - f"\n\tSelect runs which are between {self._time_low} - {self._time_high}") - - if not isinstance(data_dirs, (list, np.ndarray)): - data_dirs = [data_dirs] - - if selectors is not None: - if not isinstance(selectors, (list, np.ndarray)): - selectors = [selectors] - - self.logger.info(f"Found {len(selectors)} selector(s)") + read_calibrated_data: bool + If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. + (temp. Default: False, this can/should be switched once the calibration in incorp. into Mattak) - if select_triggers is not None: - if isinstance(select_triggers, str): - selectors.append(lambda eventInfo: eventInfo.triggerType == select_triggers) - else: - for select_trigger in select_triggers: - selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) - - self._selectors = selectors - - self._time_begin = 0 - self._time_run = 0 - self.__counter = 0 - self.__skipped = 0 - - self._events_information = None - self._datasets = [] - self.__n_events_per_dataset = [] - - self.logger.info(f"Parse through {len(data_dirs)} directory/ies.") - - self.__skipped_runs = 0 - self.__n_runs = 0 - - for data_dir in data_dirs: - - if not os.path.exists(data_dir): - self.logger.error(f"The directory {data_dir} does not exist") - continue - - if not all_files_in_directory(data_dir): - self.logger.error(f"Incomplete directory: {data_dir}. Skip ...") - continue - - dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend) - - # filter runs/datasets based on - if select_runs and self.__run_table is not None and not self.__select_run(dataset): - self.__skipped_runs += 1 - continue - - self.__n_runs += 1 - self._datasets.append(dataset) - self.__n_events_per_dataset.append(dataset.N()) - - if not len(self._datasets): - err = "Found no valid datasets. Stop!" - self.logger.error(err) - raise FileNotFoundError(err) - - # keeps track which event index is in which dataset - self._event_idxs_datasets = np.cumsum(self.__n_events_per_dataset) - self._n_events_total = np.sum(self.__n_events_per_dataset) - self._time_begin = time.time() - t0 - - self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.") - self.logger.info(f"{self._n_events_total} events in {len(self._datasets)} runs/datasets have been found.") + select_triggers: str or list(str) + Names of triggers which should be selected. Convinence interface instead of passing a selector + (see "selectors" below. (Default: None) - if not self._n_events_total: - err = "No runs have been selected. Abort ..." 
- self.logger.error(err) - raise ValueError(err) - - - def __select_run(self, dataset): - """ Filter/select runs/datasets. - - Parameters - ---------- - - dataset: mattak.Dataset.Dataset - - select: bool - Return True to select an dataset, return False to reject/skip it. - """ - - # get first eventInfo - dataset.setEntries(0) - event_info = dataset.eventInfo() - - run_id = event_info.run - station_id = event_info.station - - run_info = self.__run_table.query(f"station == {station_id:d} & run == {run_id:d}") - - if not len(run_info): - self.logger.error(f"Run {run_id:d} (station {station_id:d}) not in run table. Reject...") - return False - - # "time_start/end" is stored in the isot format. datetime is much faster than astropy (~85ns vs 55 mus). - # But using datetime would mean to stip decimals because datetime can only handle mu sec precision and can not cope - # with the additional decimals for ns. - if self._time_low is not None: - time_end = astropy.time.Time(run_info["time_end"].values[0]) - if time_end < self._time_low: - self.logger.info(f"Reject station {station_id} run {run_id} because run ended before {self._time_low}") - return False - - if self._time_high is not None: - time_start = astropy.time.Time(run_info["time_start"].values[0]) - if time_start > self._time_high: - self.logger.info(f"Reject station {station_id} run {run_id} because run started time after {self._time_high}") - return False - - run_type = run_info["run_type"].values[0] - if not run_type in self.__run_types: - self.logger.info(f"Reject station {station_id} run {run_id} because of run type {run_type}") - return False - - trigger_rate = run_info["trigger_rate"].values[0] * units.Hz - if self.__max_trigger_rate and trigger_rate > self.__max_trigger_rate: - self.logger.info(f"Reject station {station_id} run {run_id} because trigger rate is to high ({trigger_rate / units.Hz} Hz)") - return False - - return True - - - def __get_n_events_of_prev_datasets(self, dataset_idx): - """ Get accumulated number of events from previous datasets """ - dataset_idx_prev = dataset_idx - 1 - return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 - - - def __get_dataset_for_event(self, event_idx): - """ Get correct dataset and set entry accordingly to event index - - Parameters - ---------- - - event_index: int - Same as in read_event(). - - Returns - ------- - - dataset: mattak.Dataset.Dataset - """ - # find correct dataset - dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) - dataset = self._datasets[dataset_idx] - - event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) - dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event + select_runs: bool + If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). + If the run_table is not available no selection is performed (and the programm is not interrupted, + only an error message is raised). See parameters to configure run selection. (Default: False) - return dataset - - - def _filter_event(self, evtinfo, event_idx=None): - """ Filter an event base on its EventInfo and the configured selectors. - - Parameters - ---------- - - event_info: mattak.Dataset.EventInfo - The event info object for one event. - - event_index: int - Same as in read_event(). Only use for logger.info(). 
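# A numeric sketch of the cumulative-sum bookkeeping that __get_dataset_for_event()
# above relies on (not part of the patched module). The per-run event counts are
# made-up numbers; np.digitize maps a global event index to the right run/dataset.
import numpy as np

n_events_per_dataset = [100, 250, 50]                     # hypothetical runs
event_idxs_datasets = np.cumsum(n_events_per_dataset)     # -> array([100, 350, 400])

event_idx = 120                                           # global index across all runs
dataset_idx = np.digitize(event_idx, event_idxs_datasets)               # -> 1 (second run)
n_prev = int(event_idxs_datasets[dataset_idx - 1]) if dataset_idx > 0 else 0
event_idx_in_dataset = event_idx - n_prev                 # -> 20, entry within that run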
(Default: None) - - Returns - ------- - - skip: bool - Returns True to skip/reject event, return False to keep/read event - """ - if self._selectors is not None: - for selector in self._selectors: - if not selector(evtinfo): - self.logger.debug(f"Event {event_idx} (station {evtinfo.station}, run {evtinfo.run}, " - f"event number {evtinfo.eventNumber}) is skipped.") - self.__skipped += 1 - return True - - return False - - - def get_events_information(self, keys=["station", "run", "eventNumber"]): - """ Return information of all events from the EventInfo - - This function is useful to make a pre-selection of events before actually reading them in combination with - self.read_event(). - - Parameters - ---------- - - keys: list(str) - List of the information to receive from each event. Have to match the attributes (member variables) - of the mattak.Dataset.EventInfo class (examples are "station", "run", "triggerTime", "triggerType", "eventNumber", ...). - (Default: ["station", "run", "eventNumber"]) - - Returns - ------- - - data: dict - Keys of the dict are the event indecies (as used in self.read_event(event_index)). The values are dictinaries - them self containing the information specified with "keys" parameter. - """ - - # Read if dict is None ... - do_read = self._events_information is None - - if not do_read: - # ... or when it does not have the desired information - first_event_info = next(iter(self._events_information)) - for key in keys: - if key not in list(first_event_info.keys()): - do_read = True - - if do_read: - - self._events_information = {} - n_prev = 0 - for dataset in self._datasets: - dataset.setEntries((0, dataset.N())) + Other Parameters + ---------------- + + apply_baseline_correction: bool + Only applies when non-calibrated data are read. If true, correct for DC offset. + (Default: True) + + convert_to_voltage: bool + Only applies when non-calibrated data are read. If true, convert ADC to voltage. + (Default: True) + + selectors: list of lambdas + List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. + Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" - for idx, evtinfo in enumerate(dataset.eventInfo()): # returns a list - - event_idx = idx + n_prev # event index accross all datasets combined + run_table_path: str + Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) - if self._filter_event(evtinfo, event_idx): - continue + run_types: list + Used to select/reject runs from information in the RNO-G RunTable. List of run_types to be used. (Default: ['physics']) - self._events_information[event_idx] = {key: getattr(evtinfo, key) for key in keys} + run_time_range: tuple + Specify a time range to select runs (it is sufficient that runs cover the time range partially). + Each value of the tuple has to be in a format which astropy.time.Time understands. A value can be None + which means that the lower or upper bound is unconstrained. If run_time_range is None no time selection is + applied. (Default: None) - n_prev += dataset.N() - - return self._events_information - - - def _get_event(self, event_info, waveforms): - """ Return a NuRadioReco event - - Parameters - ---------- - - event_info: mattak.Dataset.EventInfo - The event info object for one event. - - waveforms: np.array(n_channel, n_samples) - Typically what dataset.wfs() returns (for one event!) 
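# A minimal sketch of the pre-selection workflow that get_events_information()
# above is meant for (not part of the patched module): read the light-weight event
# infos first, select the indices of interest, and only then load the waveforms.
# The run directory is a hypothetical example path.
from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import readRNOGData

reader = readRNOGData()
reader.begin("station23/run325")   # hypothetical run directory

infos = reader.get_events_information(keys=["station", "run", "triggerType"])
forced_idxs = [idx for idx, info in infos.items() if info["triggerType"] == "FORCE"]

for idx in forced_idxs:
    evt = reader.get_event_by_index(idx)
    if evt is None:   # e.g. events with invalid trigger time are skipped
        continue
    # ... process the forced-trigger event ...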
- - Returns - ------- - - evt: NuRadioReco.framework.event - """ - - trigger_time = event_info.triggerTime - if math.isinf(trigger_time): - self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " - "has inf trigger time. Skip event...") - return None - - evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) - station = NuRadioReco.framework.station.Station(event_info.station) - station.set_station_time(astropy.time.Time(trigger_time, format='unix')) - - trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) - trigger.set_triggered() - trigger.set_trigger_time(trigger_time) - station.set_trigger(trigger) - - for channel_id, wf in enumerate(waveforms): - channel = NuRadioReco.framework.channel.Channel(channel_id) - if self._read_calibrated_data: - channel.set_trace(wf * units.mV, event_info.sampleRate * units.GHz) - else: - # wf stores ADC counts + max_trigger_rate: float + Used to select/reject runs from information in the RNO-G RunTable. Maximum allowed trigger rate (per run) in Hz. + If 0, no cut is applied. (Default: 1 Hz) - if self._apply_baseline_correction: - # correct baseline - wf = baseline_correction(wf) + mattak_backend: str + Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise + a "fallback" to uproot is used. (Default: "auto") + + log_level: enum + Set verbosity level of logger + """ + + t0 = time.time() + + self.logger = logging.getLogger('NuRadioReco.readRNOGData') + self.logger.setLevel(log_level) + + self._read_calibrated_data = read_calibrated_data + self._apply_baseline_correction = apply_baseline_correction + self._convert_to_voltage = convert_to_voltage + + # Temporary solution hard-coded values from Cosmin. Only used when uncalibrated data + # is read and convert_to_voltage is True. + self._adc_ref_voltage_range = 2.5 * units.volt + self._adc_n_bits = 12 + + self.__max_trigger_rate = max_trigger_rate + self.__run_types = run_types + + if run_time_range is not None: + convert_time = lambda t: None if t is None else astropy.time.Time(t) + self._time_low = convert_time(run_time_range[0]) + self._time_high = convert_time(run_time_range[1]) + else: + self._time_low = None + self._time_high = None + + self.__run_table = None + if select_runs: + if run_table_path is None: + try: + from rnog_data.runtable import RunTable + self.logger.debug("Access RunTable database ...") + try: + self.__run_table = RunTable().get_table() + except: + self.logger.error("No connect to RunTable database could be established. " + "Runs will not be filtered.") + except ImportError: + self.logger.error("Import of run table failed. You will not be able to select runs! \n" + "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") + else: + import pandas + self.__run_table = pandas.read_csv(run_table_path) + + if select_runs and self.__run_table is not None: + self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + + f"\n\tSelect runs with max. 
trigger rate of {max_trigger_rate / units.Hz} Hz" + f"\n\tSelect runs which are between {self._time_low} - {self._time_high}") + + if not isinstance(data_dirs, (list, np.ndarray)): + data_dirs = [data_dirs] + + if selectors is not None: + if not isinstance(selectors, (list, np.ndarray)): + selectors = [selectors] - if self._convert_to_voltage: - # convert adc to voltage - wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) - - channel.set_trace(wf, event_info.sampleRate * units.GHz) - - time_offset = get_time_offset(event_info.triggerType) - channel.set_trace_start_time(-time_offset) # relative to event/trigger time - - station.add_channel(channel) - - evt.set_station(station) - - return evt - - - @register_run() - def run(self): - """ - Loop over all events. - - Returns - ------- - - evt: generator(NuRadioReco.framework.event) - """ - event_idx = -1 - for dataset in self._datasets: - dataset.setEntries((0, dataset.N())) - - # read all event infos of the entier dataset (= run) - event_infos = dataset.eventInfo() - wfs = None - - for idx, evtinfo in enumerate(event_infos): # returns a list - event_idx += 1 + self.logger.info(f"Found {len(selectors)} selector(s)") + + self._selectors = selectors + + if select_triggers is not None: + if isinstance(select_triggers, str): + selectors.append(lambda eventInfo: eventInfo.triggerType == select_triggers) + else: + for select_trigger in select_triggers: + selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) + + self._time_begin = 0 + self._time_run = 0 + self.__counter = 0 + self.__skipped = 0 + + self._events_information = None + self._datasets = [] + self.__n_events_per_dataset = [] + + self.logger.info(f"Parse through {len(data_dirs)} directory/ies.") + + self.__skipped_runs = 0 + self.__n_runs = 0 + + for data_dir in data_dirs: - self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") - t0 = time.time() - - if self._filter_event(evtinfo, event_idx): - continue + if not os.path.exists(data_dir): + self.logger.error(f"The directory {data_dir} does not exist") + continue - # Just read wfs if necessary - if wfs is None: - wfs = dataset.wfs() - - waveforms_of_event = wfs[idx] + if not all_files_in_directory(data_dir): + self.logger.error(f"Incomplete directory: {data_dir}. Skip ...") + continue + + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend) + + # filter runs/datasets based on + if select_runs and self.__run_table is not None and not self.__select_run(dataset): + self.__skipped_runs += 1 + continue - evt = self._get_event(evtinfo, waveforms_of_event) - if evt is None: - continue + self.__n_runs += 1 + self._datasets.append(dataset) + self.__n_events_per_dataset.append(dataset.N()) - self._time_run += time.time() - t0 - self.__counter += 1 - - yield evt - - - - def get_event_by_index(self, event_index): - """ Allows to read a specific event identifed by its index - - Parameters - ---------- - - event_index: int - The index of a particluar event. The index is the chronological number from 0 to - number of total events (across all datasets). - - Returns - ------- - - evt: NuRadioReco.framework.event - """ - - self.logger.debug(f"Processing event number {event_index} out of total {self._n_events_total}") - t0 = time.time() + if not len(self._datasets): + err = "Found no valid datasets. Stop!" 
+ self.logger.error(err) + raise FileNotFoundError(err) + + # keeps track which event index is in which dataset + self._event_idxs_datasets = np.cumsum(self.__n_events_per_dataset) + self._n_events_total = np.sum(self.__n_events_per_dataset) + self._time_begin = time.time() - t0 + + self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.") + self.logger.info(f"{self._n_events_total} events in {len(self._datasets)} runs/datasets have been found.") + + if not self._n_events_total: + err = "No runs have been selected. Abort ..." + self.logger.error(err) + raise ValueError(err) + + + def __select_run(self, dataset): + """ Filter/select runs/datasets. + + Parameters + ---------- + + dataset: mattak.Dataset.Dataset + + select: bool + Return True to select an dataset, return False to reject/skip it. + """ + + # get first eventInfo + dataset.setEntries(0) + event_info = dataset.eventInfo() + + run_id = event_info.run + station_id = event_info.station + + run_info = self.__run_table.query(f"station == {station_id:d} & run == {run_id:d}") + + if not len(run_info): + self.logger.error(f"Run {run_id:d} (station {station_id:d}) not in run table. Reject...") + return False + + # "time_start/end" is stored in the isot format. datetime is much faster than astropy (~85ns vs 55 mus). + # But using datetime would mean to stip decimals because datetime can only handle mu sec precision and can not cope + # with the additional decimals for ns. + if self._time_low is not None: + time_end = astropy.time.Time(run_info["time_end"].values[0]) + if time_end < self._time_low: + self.logger.info(f"Reject station {station_id} run {run_id} because run ended before {self._time_low}") + return False + + if self._time_high is not None: + time_start = astropy.time.Time(run_info["time_start"].values[0]) + if time_start > self._time_high: + self.logger.info(f"Reject station {station_id} run {run_id} because run started time after {self._time_high}") + return False + + run_type = run_info["run_type"].values[0] + if not run_type in self.__run_types: + self.logger.info(f"Reject station {station_id} run {run_id} because of run type {run_type}") + return False + + trigger_rate = run_info["trigger_rate"].values[0] * units.Hz + if self.__max_trigger_rate and trigger_rate > self.__max_trigger_rate: + self.logger.info(f"Reject station {station_id} run {run_id} because trigger rate is to high ({trigger_rate / units.Hz} Hz)") + return False + + return True - dataset = self.__get_dataset_for_event(event_index) - event_info = dataset.eventInfo() # returns a single eventInfo - if self._filter_event(event_info, event_index): - return None + def __get_n_events_of_prev_datasets(self, dataset_idx): + """ Get accumulated number of events from previous datasets """ + dataset_idx_prev = dataset_idx - 1 + return int(self._event_idxs_datasets[dataset_idx_prev]) if dataset_idx_prev >= 0 else 0 + + + def __get_dataset_for_event(self, event_idx): + """ Get correct dataset and set entry accordingly to event index + + Parameters + ---------- + + event_index: int + Same as in read_event(). 
+ + Returns + ------- + + dataset: mattak.Dataset.Dataset + """ + # find correct dataset + dataset_idx = np.digitize(event_idx, self._event_idxs_datasets) + dataset = self._datasets[dataset_idx] + + event_idx_in_dataset = event_idx - self.__get_n_events_of_prev_datasets(dataset_idx) + dataset.setEntries(event_idx_in_dataset) # increment iterator -> point to new event + + return dataset + + + def _filter_event(self, evtinfo, event_idx=None): + """ Filter an event base on its EventInfo and the configured selectors. + + Parameters + ---------- + + event_info: mattak.Dataset.EventInfo + The event info object for one event. + + event_index: int + Same as in read_event(). Only use for logger.info(). (Default: None) - # access data - waveforms = dataset.wfs() - - evt = self._get_event(event_info, waveforms) - - self._time_run += time.time() - t0 - self.__counter += 1 - - return evt - - - def get_event(self, run_nr, event_id): - """ Allows to read a specific event identifed by its id + Returns + ------- + + skip: bool + Returns True to skip/reject event, return False to keep/read event + """ + if self._selectors is not None: + for selector in self._selectors: + if not selector(evtinfo): + self.logger.debug(f"Event {event_idx} (station {evtinfo.station}, run {evtinfo.run}, " + f"event number {evtinfo.eventNumber}) is skipped.") + self.__skipped += 1 + return True + + return False + + + def get_events_information(self, keys=["station", "run", "eventNumber"]): + """ Return information of all events from the EventInfo + + This function is useful to make a pre-selection of events before actually reading them in combination with + self.read_event(). + + Parameters + ---------- + + keys: list(str) + List of the information to receive from each event. Have to match the attributes (member variables) + of the mattak.Dataset.EventInfo class (examples are "station", "run", "triggerTime", "triggerType", "eventNumber", ...). + (Default: ["station", "run", "eventNumber"]) + + Returns + ------- + + data: dict + Keys of the dict are the event indecies (as used in self.read_event(event_index)). The values are dictinaries + them self containing the information specified with "keys" parameter. + """ + + # Read if dict is None ... + do_read = self._events_information is None + + if not do_read: + # ... or when it does not have the desired information + first_event_info = next(iter(self._events_information)) + for key in keys: + if key not in list(first_event_info.keys()): + do_read = True + + if do_read: + + self._events_information = {} + n_prev = 0 + for dataset in self._datasets: + dataset.setEntries((0, dataset.N())) + + for idx, evtinfo in enumerate(dataset.eventInfo()): # returns a list + + event_idx = idx + n_prev # event index accross all datasets combined + + if self._filter_event(evtinfo, event_idx): + continue + + self._events_information[event_idx] = {key: getattr(evtinfo, key) for key in keys} + + n_prev += dataset.N() + + return self._events_information + + + def _get_event(self, event_info, waveforms): + """ Return a NuRadioReco event + + Parameters + ---------- + + event_info: mattak.Dataset.EventInfo + The event info object for one event. + + waveforms: np.array(n_channel, n_samples) + Typically what dataset.wfs() returns (for one event!) + + Returns + ------- + + evt: NuRadioReco.framework.event + """ + + trigger_time = event_info.triggerTime + if math.isinf(trigger_time): + self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " + "has inf trigger time. 
Skip event...") + return None + + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) + station = NuRadioReco.framework.station.Station(event_info.station) + station.set_station_time(astropy.time.Time(trigger_time, format='unix')) + + trigger = NuRadioReco.framework.trigger.Trigger(event_info.triggerType) + trigger.set_triggered() + trigger.set_trigger_time(trigger_time) + station.set_trigger(trigger) + + for channel_id, wf in enumerate(waveforms): + channel = NuRadioReco.framework.channel.Channel(channel_id) + if self._read_calibrated_data: + channel.set_trace(wf * units.mV, event_info.sampleRate * units.GHz) + else: + # wf stores ADC counts + + if self._apply_baseline_correction: + # correct baseline + wf = baseline_correction(wf) + + if self._convert_to_voltage: + # convert adc to voltage + wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) + + channel.set_trace(wf, event_info.sampleRate * units.GHz) + + time_offset = get_time_offset(event_info.triggerType) + channel.set_trace_start_time(-time_offset) # relative to event/trigger time + + station.add_channel(channel) + + evt.set_station(station) + + return evt + + + @register_run() + def run(self): + """ + Loop over all events. + + Returns + ------- + + evt: generator(NuRadioReco.framework.event) + """ + event_idx = -1 + for dataset in self._datasets: + dataset.setEntries((0, dataset.N())) + + # read all event infos of the entier dataset (= run) + event_infos = dataset.eventInfo() + wfs = None + + for idx, evtinfo in enumerate(event_infos): # returns a list + event_idx += 1 + + self.logger.debug(f"Processing event number {event_idx} out of total {self._n_events_total}") + t0 = time.time() + + if self._filter_event(evtinfo, event_idx): + continue + + # Just read wfs if necessary + if wfs is None: + wfs = dataset.wfs() + + waveforms_of_event = wfs[idx] + + evt = self._get_event(evtinfo, waveforms_of_event) + if evt is None: + continue + + self._time_run += time.time() - t0 + self.__counter += 1 + + yield evt + + + + def get_event_by_index(self, event_index): + """ Allows to read a specific event identifed by its index + + Parameters + ---------- + + event_index: int + The index of a particluar event. The index is the chronological number from 0 to + number of total events (across all datasets). 
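# A minimal event-loop sketch for the run() generator defined above (not part of
# the patched module). The run directory and the trigger selector are example
# values; the selector follows the lambda form given in the begin() docstring.
from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import readRNOGData

reader = readRNOGData()
reader.begin(["station23/run325"],
             selectors=[lambda eventInfo: eventInfo.triggerType == "FORCE"])

for evt in reader.run():
    station = evt.get_station(evt.get_station_ids()[0])
    for channel in station.iter_channels():
        trace = channel.get_trace()   # voltage or ADC counts, depending on begin() options
        # ... analysis ...

reader.end()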
+ + Returns + ------- + + evt: NuRadioReco.framework.event + """ + + self.logger.debug(f"Processing event number {event_index} out of total {self._n_events_total}") + t0 = time.time() + + dataset = self.__get_dataset_for_event(event_index) + event_info = dataset.eventInfo() # returns a single eventInfo + + if self._filter_event(event_info, event_index): + return None + + # access data + waveforms = dataset.wfs() + + evt = self._get_event(event_info, waveforms) + + self._time_run += time.time() - t0 + self.__counter += 1 + + return evt + + + def get_event(self, run_nr, event_id): + """ Allows to read a specific event identifed by its id - Parameters - ---------- - - run_nr: int - Run number + Parameters + ---------- + + run_nr: int + Run number - event_id: int - Event Id - - Returns - ------- + event_id: int + Event Id + + Returns + ------- - evt: NuRadioReco.framework.event - """ + evt: NuRadioReco.framework.event + """ - self.logger.debug(f"Processing event {event_id}") - t0 = time.time() + self.logger.debug(f"Processing event {event_id}") + t0 = time.time() - event_infos = self.get_events_information(keys=["eventNumber", "run"]) - event_idx_ids = np.array([[index, ele["eventNumber"], ele["run"]] for index, ele in event_infos.items()]) - mask = np.all([event_idx_ids[:, 1] == event_id, event_idx_ids[:, 2] == run_nr], axis=0) + event_infos = self.get_events_information(keys=["eventNumber", "run"]) + event_idx_ids = np.array([[index, ele["eventNumber"], ele["run"]] for index, ele in event_infos.items()]) + mask = np.all([event_idx_ids[:, 1] == event_id, event_idx_ids[:, 2] == run_nr], axis=0) - if not np.any(mask): - self.logger.info(f"Could not find event with id: {event_id}.") - return None - elif np.sum(mask) > 1: - self.logger.error(f"Found several events with the same id: {event_id}.") - raise ValueError(f"Found several events with the same id: {event_id}.") - else: - pass + if not np.any(mask): + self.logger.info(f"Could not find event with id: {event_id}.") + return None + elif np.sum(mask) > 1: + self.logger.error(f"Found several events with the same id: {event_id}.") + raise ValueError(f"Found several events with the same id: {event_id}.") + else: + pass - event_index = event_idx_ids[mask, 0][0] + event_index = event_idx_ids[mask, 0][0] - dataset = self.__get_dataset_for_event(event_index) - event_info = dataset.eventInfo() # returns a single eventInfo + dataset = self.__get_dataset_for_event(event_index) + event_info = dataset.eventInfo() # returns a single eventInfo - if self._filter_event(event_info, event_index): - return None + if self._filter_event(event_info, event_index): + return None - # access data - waveforms = dataset.wfs() + # access data + waveforms = dataset.wfs() - evt = self._get_event(event_info, waveforms) + evt = self._get_event(event_info, waveforms) - self._time_run += time.time() - t0 - self.__counter += 1 + self._time_run += time.time() - t0 + self.__counter += 1 - return evt + return evt - def end(self): - self.logger.info( - f"\n\tRead {self.__counter} events (skipped {self.__skipped} events)" - f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" - f"\n\tTime to initialize all events : {self._time_run:.2f}s" - f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" - f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") + def end(self): + self.logger.info( + f"\n\tRead {self.__counter} events (skipped {self.__skipped} events)" + f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" + f"\n\tTime to initialize all 
events : {self._time_run:.2f}s" + f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" + f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") From dbbb46ae719bb90f6b9e519fac9d180ac63302fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 14:15:36 +0200 Subject: [PATCH 058/102] Change tab spacing from 3 to 4 spaces in readRNOGDataMattak.py --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 7f0280bd5..80d2357a4 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -632,11 +632,12 @@ def get_event_by_index(self, event_index): def get_event(self, run_nr, event_id): - """ Allows to read a specific event identifed by its id + + """ Allows to read a specific event identifed by run number and event id Parameters ---------- - + run_nr: int Run number From 933610d560f7ae34f5f87303cee42fb7b59cb5c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 14:16:47 +0200 Subject: [PATCH 059/102] Add additional sanity check for sampling rate to _get_event --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 80d2357a4..335b5dc85 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -515,6 +515,12 @@ def _get_event(self, event_info, waveforms): "has inf trigger time. Skip event...") return None + sampling_rate = event_info.sampleRate + if sampling_rate == 0: + self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " + f"has a sampling rate of {sampling_rate} GHz. 
Skip event...") + return None + evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) station.set_station_time(astropy.time.Time(trigger_time, format='unix')) @@ -527,7 +533,7 @@ def _get_event(self, event_info, waveforms): for channel_id, wf in enumerate(waveforms): channel = NuRadioReco.framework.channel.Channel(channel_id) if self._read_calibrated_data: - channel.set_trace(wf * units.mV, event_info.sampleRate * units.GHz) + channel.set_trace(wf * units.mV, sampling_rate * units.GHz) else: # wf stores ADC counts @@ -539,7 +545,7 @@ def _get_event(self, event_info, waveforms): # convert adc to voltage wf *= (self._adc_ref_voltage_range / (2 ** (self._adc_n_bits) - 1)) - channel.set_trace(wf, event_info.sampleRate * units.GHz) + channel.set_trace(wf, sampling_rate * units.GHz) time_offset = get_time_offset(event_info.triggerType) channel.set_trace_start_time(-time_offset) # relative to event/trigger time From 5c1455499caff9f0c5b68b443ada30e934e0fb0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 14:38:59 +0200 Subject: [PATCH 060/102] Pass logging level to mattak --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 335b5dc85..d40189863 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -286,6 +286,7 @@ def begin(self, self.__skipped_runs = 0 self.__n_runs = 0 + verbose = log_level == logging.DEBUG for data_dir in data_dirs: if not os.path.exists(data_dir): @@ -296,7 +297,7 @@ def begin(self, self.logger.error(f"Incomplete directory: {data_dir}. Skip ...") continue - dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend) + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend, verbose=verbose) # filter runs/datasets based on if select_runs and self.__run_table is not None and not self.__select_run(dataset): From edbef10044eb1ef497d858ef569f99a4f41d027b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 18:44:51 +0200 Subject: [PATCH 061/102] Fixing bug in readers begin() method. rearange the begin method a bit --- .../modules/io/RNO_G/readRNOGDataMattak.py | 51 ++++++++++--------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index d40189863..881ba3769 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -139,7 +139,7 @@ def begin(self, select_runs=False, apply_baseline_correction=True, convert_to_voltage=True, - selectors=None, + selectors=[], run_table_path=None, run_types=["physics"], run_time_range=None, @@ -219,18 +219,8 @@ def begin(self, # is read and convert_to_voltage is True. 
self._adc_ref_voltage_range = 2.5 * units.volt self._adc_n_bits = 12 - - self.__max_trigger_rate = max_trigger_rate - self.__run_types = run_types - - if run_time_range is not None: - convert_time = lambda t: None if t is None else astropy.time.Time(t) - self._time_low = convert_time(run_time_range[0]) - self._time_high = convert_time(run_time_range[1]) - else: - self._time_low = None - self._time_high = None - + + # Initialize run table for run selection self.__run_table = None if select_runs: if run_table_path is None: @@ -248,22 +238,29 @@ def begin(self, else: import pandas self.__run_table = pandas.read_csv(run_table_path) - + + # Set parameter for run selection + self.__max_trigger_rate = max_trigger_rate + self.__run_types = run_types + + if run_time_range is not None: + convert_time = lambda t: None if t is None else astropy.time.Time(t) + self._time_low = convert_time(run_time_range[0]) + self._time_high = convert_time(run_time_range[1]) + else: + self._time_low = None + self._time_high = None + if select_runs and self.__run_table is not None: self.logger.info("\n\tSelect runs with type: {}".format(", ".join(run_types)) + f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz" f"\n\tSelect runs which are between {self._time_low} - {self._time_high}") - if not isinstance(data_dirs, (list, np.ndarray)): - data_dirs = [data_dirs] - - if selectors is not None: - if not isinstance(selectors, (list, np.ndarray)): - selectors = [selectors] - - self.logger.info(f"Found {len(selectors)} selector(s)") - - self._selectors = selectors + # Initialize selectors for event filtering + if not isinstance(selectors, (list, np.ndarray)): + selectors = [selectors] + + self.logger.info(f"Found {len(selectors)} selector(s)") if select_triggers is not None: if isinstance(select_triggers, str): @@ -271,7 +268,10 @@ def begin(self, else: for select_trigger in select_triggers: selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) + + self._selectors = selectors + # Read data self._time_begin = 0 self._time_run = 0 self.__counter = 0 @@ -286,6 +286,9 @@ def begin(self, self.__skipped_runs = 0 self.__n_runs = 0 + if not isinstance(data_dirs, (list, np.ndarray)): + data_dirs = [data_dirs] + verbose = log_level == logging.DEBUG for data_dir in data_dirs: From 7ad0d98e07bb44903f0a1c7976f5c2823608ed2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 26 Apr 2023 18:45:25 +0200 Subject: [PATCH 062/102] Import logger output --- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 287ca1ae8..70ce1e016 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -82,7 +82,8 @@ def begin(self, noise_folders, file_pattern="*", self.logger.info(f"Found {len(self.__noise_folders)}") if not len(self.__noise_folders): - raise ValueError + self.logger.error("No folders found") + raise FileNotFoundError("No folders found") if scramble_noise_file_order: self.__random_gen.shuffle(self.__noise_folders) From eacffe47522d314388e1e8e2c796ad83bb57f0e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 27 Apr 2023 12:29:53 +0200 Subject: [PATCH 063/102] Improve logging, change begin interface (introduce general mattak_kwargs) --- 
.../modules/io/RNO_G/readRNOGDataMattak.py | 49 +++++++++++-------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 881ba3769..b8d90a729 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -133,19 +133,19 @@ def all_files_in_directory(mattak_dir): class readRNOGData: def begin(self, - data_dirs, - read_calibrated_data=False, - select_triggers=None, - select_runs=False, - apply_baseline_correction=True, - convert_to_voltage=True, - selectors=[], - run_table_path=None, - run_types=["physics"], - run_time_range=None, - max_trigger_rate=0 * units.Hz, - mattak_backend="auto", - log_level=logging.INFO): + data_dirs, + read_calibrated_data=False, + select_triggers=None, + select_runs=False, + apply_baseline_correction=True, + convert_to_voltage=True, + selectors=[], + run_table_path=None, + run_types=["physics"], + run_time_range=None, + max_trigger_rate=0 * units.Hz, + mattak_kwargs={}, + log_level=logging.INFO): """ Parameters @@ -198,12 +198,14 @@ def begin(self, Used to select/reject runs from information in the RNO-G RunTable. Maximum allowed trigger rate (per run) in Hz. If 0, no cut is applied. (Default: 1 Hz) - mattak_backend: str - Select a mattak backend. Options are "auto", "pyroot", "uproot". If "auto" is selected, pyroot is used if available otherwise - a "fallback" to uproot is used. (Default: "auto") + mattak_kwargs: dict + Dictionary of arguments for mattak.Dataset.Dataset. (Default: {}) + Example: Select a mattak "backend". Options are "auto", "pyroot", "uproot". If "auto" is selected, + pyroot is used if available otherwise a "fallback" to uproot is used. (Default: "auto") log_level: enum - Set verbosity level of logger + Set verbosity level of logger. If logging.DEBUG, set mattak to verbose (unless specified in mattak_kwargs). + (Default: logging.INFO) """ t0 = time.time() @@ -289,7 +291,12 @@ def begin(self, if not isinstance(data_dirs, (list, np.ndarray)): data_dirs = [data_dirs] - verbose = log_level == logging.DEBUG + # Set verbose for mattak + if "verbose" in mattak_kwargs: + vabose = mattak_kwargs.pop("verbose") + else: + verbose = log_level == logging.DEBUG + for data_dir in data_dirs: if not os.path.exists(data_dir): @@ -300,7 +307,7 @@ def begin(self, self.logger.error(f"Incomplete directory: {data_dir}. Skip ...") continue - dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, backend=mattak_backend, verbose=verbose) + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, verbose=verbose, **mattak_kwargs) # filter runs/datasets based on if select_runs and self.__run_table is not None and not self.__select_run(dataset): @@ -321,8 +328,8 @@ def begin(self, self._n_events_total = np.sum(self.__n_events_per_dataset) self._time_begin = time.time() - t0 - self.logger.info(f"Using the {self._datasets[0].backend} Mattak backend.") - self.logger.info(f"{self._n_events_total} events in {len(self._datasets)} runs/datasets have been found.") + self.logger.info(f"{self._n_events_total} events in {len(self._datasets)} runs/datasets " + f"have been found using the {self._datasets[0].backend} Mattak backend.") if not self._n_events_total: err = "No runs have been selected. Abort ..." 
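The reworked begin() interface above takes a generic mattak_kwargs dictionary that is forwarded
to mattak.Dataset.Dataset. A minimal usage sketch follows; the run directories, the run-table
file name and the parameter values are example assumptions, not taken from the patches.

import logging
from NuRadioReco.utilities import units
from NuRadioReco.modules.io.RNO_G.readRNOGDataMattak import readRNOGData

reader = readRNOGData()
reader.begin(
    ["station23/run325", "station23/run326"],   # hypothetical run directories
    select_runs=True,
    run_table_path="run_table.csv",             # hypothetical local copy of the RNO-G run table
    run_types=["physics"],
    max_trigger_rate=2 * units.Hz,
    mattak_kwargs=dict(backend="uproot"),       # forwarded to mattak.Dataset.Dataset
    log_level=logging.DEBUG,                    # DEBUG also switches mattak to verbose
)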
From 1e19aa7aa16d93c24de35fb296e54e50fc1776ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 27 Apr 2023 13:34:07 +0200 Subject: [PATCH 064/102] fix whitespace in logger --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index b8d90a729..b77895095 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -707,5 +707,5 @@ def end(self): f"\n\tRead {self.__counter} events (skipped {self.__skipped} events)" f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" f"\n\tTime to initialize all events : {self._time_run:.2f}s" - f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" + f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") From e0940ab5c2a48462dac4b2112aae168019d766ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 28 Apr 2023 10:28:25 +0200 Subject: [PATCH 065/102] Remove one ws --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 1 - 1 file changed, 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index b77895095..9e528af1e 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -649,7 +649,6 @@ def get_event_by_index(self, event_index): def get_event(self, run_nr, event_id): - """ Allows to read a specific event identifed by run number and event id Parameters From 055e058a5256be8829f09431f2833381f5c1909c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 28 Apr 2023 15:48:48 +0200 Subject: [PATCH 066/102] Start implementing support to read file by file --- .../modules/io/RNO_G/readRNOGDataMattak.py | 33 +++++++++++-------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 9e528af1e..e6d4d1ec8 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -133,7 +133,7 @@ def all_files_in_directory(mattak_dir): class readRNOGData: def begin(self, - data_dirs, + dirs_files, read_calibrated_data=False, select_triggers=None, select_runs=False, @@ -151,8 +151,8 @@ def begin(self, Parameters ---------- - data_dirs: list of strings / string - Path to run directories (i.e. ".../stationXX/runXXX/") + dirs_files: str, list(str) + Path to run directories (i.e. ".../stationXX/runXXX/") or path to root files (have to be "combined" mattak files). read_calibrated_data: bool If True, read calibrated waveforms from Mattak.Dataset. If False, read "raw" ADC traces. 
@@ -283,13 +283,13 @@ def begin(self, self._datasets = [] self.__n_events_per_dataset = [] - self.logger.info(f"Parse through {len(data_dirs)} directory/ies.") + self.logger.info(f"Parse through / read-in {len(dirs_files)} directory(ies) / file(s).") self.__skipped_runs = 0 self.__n_runs = 0 - if not isinstance(data_dirs, (list, np.ndarray)): - data_dirs = [data_dirs] + if not isinstance(dirs_files, (list, np.ndarray)): + dirs_files = [dirs_files] # Set verbose for mattak if "verbose" in mattak_kwargs: @@ -297,17 +297,22 @@ def begin(self, else: verbose = log_level == logging.DEBUG - for data_dir in data_dirs: + for dir_file in dirs_files: - if not os.path.exists(data_dir): - self.logger.error(f"The directory {data_dir} does not exist") + if not os.path.exists(dir_file): + self.logger.error(f"The directory/file {dir_file} does not exist") continue - if not all_files_in_directory(data_dir): - self.logger.error(f"Incomplete directory: {data_dir}. Skip ...") - continue - - dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir, verbose=verbose, **mattak_kwargs) + if os.path.isdir(dir_file): + + if not all_files_in_directory(dir_file): + self.logger.error(f"Incomplete directory: {dir_file}. Skip ...") + continue + + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=dir_file, verbose=verbose, **mattak_kwargs) + else: + raise NotImplementedError("The option to read in files is not implemented yet") + # filter runs/datasets based on if select_runs and self.__run_table is not None and not self.__select_run(dataset): From 9491ae2a5b62da9bfe4f6434ddb57bd78e7a85ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 28 Apr 2023 16:51:00 +0200 Subject: [PATCH 067/102] Improve logger --- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index 70ce1e016..d3f18689f 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -103,7 +103,7 @@ def begin(self, noise_folders, file_pattern="*", self.logger.info("Get event informations ...") t0 = time.time() noise_information = self._noise_reader.get_events_information(keys=["station"]) - self.logger.info(f"... in {time.time() - t0:.2f}s") + self.logger.info(f"... 
of {len(noise_information)} (selected) events in {time.time() - t0:.2f}s") self.__event_index_list = np.array(list(noise_information.keys())) self.__station_id_list = np.array([ele["station"] for ele in noise_information.values()]) From ee79203e3088acbf90bf5fddfd58dc10a05271c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 28 Apr 2023 17:19:50 +0200 Subject: [PATCH 068/102] Check validity of eventInfo outside of _get_event to not unnecessarily read wfs --- .../modules/io/RNO_G/readRNOGDataMattak.py | 53 +++++++++++++++---- 1 file changed, 42 insertions(+), 11 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index e6d4d1ec8..c3248581e 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -507,6 +507,37 @@ def get_events_information(self, keys=["station", "run", "eventNumber"]): return self._events_information + def _check_for_valid_information_in_event_info(self, event_info): + """ + Checks if certain information (sampling rate, trigger time) in mattak.Dataset.EventInfo are valid + + Parameters + ---------- + + event_info: mattak.Dataset.EventInfo + + Returns + ------- + + is_valid: bool + Returns True if all information valid, false otherwise + """ + + + if math.isinf(event_info.triggerTime): + self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " + "has inf trigger time. Skip event...") + return False + + + if event_info.sampleRate == 0 or event_info.sampleRate is None: + self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " + f"has a sampling rate of {event_info.sampleRate} GHz. Skip event...") + return False + + return True + + def _get_event(self, event_info, waveforms): """ Return a NuRadioReco event @@ -526,16 +557,7 @@ def _get_event(self, event_info, waveforms): """ trigger_time = event_info.triggerTime - if math.isinf(trigger_time): - self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " - "has inf trigger time. Skip event...") - return None - sampling_rate = event_info.sampleRate - if sampling_rate == 0: - self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " - f"has a sampling rate of {sampling_rate} GHz. 
Skip event...") - return None evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) @@ -600,6 +622,9 @@ def run(self): if self._filter_event(evtinfo, event_idx): continue + if not self._check_for_valid_information_in_event_info(evtinfo): + continue + # Just read wfs if necessary if wfs is None: wfs = dataset.wfs() @@ -607,8 +632,6 @@ def run(self): waveforms_of_event = wfs[idx] evt = self._get_event(evtinfo, waveforms_of_event) - if evt is None: - continue self._time_run += time.time() - t0 self.__counter += 1 @@ -641,6 +664,10 @@ def get_event_by_index(self, event_index): if self._filter_event(event_info, event_index): return None + + # check this before reading the wfs + if not self._check_for_valid_information_in_event_info(event_info): + return None # access data waveforms = dataset.wfs() @@ -694,6 +721,10 @@ def get_event(self, run_nr, event_id): if self._filter_event(event_info, event_index): return None + + # check this before reading the wfs + if not self._check_for_valid_information_in_event_info(event_info): + return None # access data waveforms = dataset.wfs() From e47964d234dfabd62c0a38fae73cedd16b485d2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 28 Apr 2023 17:56:01 +0200 Subject: [PATCH 069/102] Move logger to right position, add more logging --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index c3248581e..22b0d2ce8 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -261,9 +261,7 @@ def begin(self, # Initialize selectors for event filtering if not isinstance(selectors, (list, np.ndarray)): selectors = [selectors] - - self.logger.info(f"Found {len(selectors)} selector(s)") - + if select_triggers is not None: if isinstance(select_triggers, str): selectors.append(lambda eventInfo: eventInfo.triggerType == select_triggers) @@ -272,12 +270,14 @@ def begin(self, selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) self._selectors = selectors - + self.logger.info(f"Found {len(self._selectors)} selector(s)") + # Read data self._time_begin = 0 self._time_run = 0 self.__counter = 0 self.__skipped = 0 + self.__invalid = 0 self._events_information = None self._datasets = [] @@ -527,12 +527,14 @@ def _check_for_valid_information_in_event_info(self, event_info): if math.isinf(event_info.triggerTime): self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " "has inf trigger time. Skip event...") + self.__invalid += 1 return False if event_info.sampleRate == 0 or event_info.sampleRate is None: self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " f"has a sampling rate of {event_info.sampleRate} GHz. 
Skip event...") + self.__invalid += 1 return False return True @@ -739,8 +741,8 @@ def get_event(self, run_nr, event_id): def end(self): self.logger.info( - f"\n\tRead {self.__counter} events (skipped {self.__skipped} events)" + f"\n\tRead {self.__counter} events (skipped {self.__skipped} events, {self.__invalid} invalid events)" f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" - f"\n\tTime to initialize all events : {self._time_run:.2f}s" + f"\n\tTime to read all events : {self._time_run:.2f}s" f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") From 47011bf1b80ecd409704737106bfb321c30d2ee1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 2 May 2023 14:49:31 +0200 Subject: [PATCH 070/102] Allow overwritting the sampling rate in the reader because some mattak files do not have that information --- .../modules/io/RNO_G/readRNOGDataMattak.py | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 22b0d2ce8..643b1d9fc 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -145,6 +145,7 @@ def begin(self, run_time_range=None, max_trigger_rate=0 * units.Hz, mattak_kwargs={}, + overwrite_sampling_rate=None, log_level=logging.INFO): """ @@ -201,7 +202,12 @@ def begin(self, mattak_kwargs: dict Dictionary of arguments for mattak.Dataset.Dataset. (Default: {}) Example: Select a mattak "backend". Options are "auto", "pyroot", "uproot". If "auto" is selected, - pyroot is used if available otherwise a "fallback" to uproot is used. (Default: "auto") + pyroot is used if available otherwise a "fallback" to uproot is used. (Default: "auto") + + overwrite_sampling_rate: float + Set sampling rate of the imported waveforms. This overwrites what is read out from runinfo (i.e., stored in the mattak files). + If None, nothing is overwritten and the sampling rate from the mattak file is used. (Default: None) + NOTE: This option might be necessary when old mattak files are read which have this not set. log_level: enum Set verbosity level of logger. If logging.DEBUG, set mattak to verbose (unless specified in mattak_kwargs). @@ -221,6 +227,8 @@ def begin(self, # is read and convert_to_voltage is True. self._adc_ref_voltage_range = 2.5 * units.volt self._adc_n_bits = 12 + + self._overwrite_sampling_rate = overwrite_sampling_rate # Initialize run table for run selection self.__run_table = None @@ -389,7 +397,7 @@ def __select_run(self, dataset): trigger_rate = run_info["trigger_rate"].values[0] * units.Hz if self.__max_trigger_rate and trigger_rate > self.__max_trigger_rate: - self.logger.info(f"Reject station {station_id} run {run_id} because trigger rate is to high ({trigger_rate / units.Hz} Hz)") + self.logger.info(f"Reject station {station_id} run {run_id} because trigger rate is to high ({trigger_rate / units.Hz:.2f} Hz)") return False return True @@ -531,9 +539,9 @@ def _check_for_valid_information_in_event_info(self, event_info): return False - if event_info.sampleRate == 0 or event_info.sampleRate is None: + if (event_info.sampleRate == 0 or event_info.sampleRate is None) and self._overwrite_sampling_rate is None: self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " - f"has a sampling rate of {event_info.sampleRate} GHz. 
Skip event...") + f"has a sampling rate of {event_info.sampleRate:.2f} GHz. Skip event...") self.__invalid += 1 return False @@ -559,7 +567,10 @@ def _get_event(self, event_info, waveforms): """ trigger_time = event_info.triggerTime - sampling_rate = event_info.sampleRate + if self._overwrite_sampling_rate is not None: + sampling_rate = self._overwrite_sampling_rate + else: + sampling_rate = event_info.sampleRate evt = NuRadioReco.framework.event.Event(event_info.run, event_info.eventNumber) station = NuRadioReco.framework.station.Station(event_info.station) From f331d1945942d339bcf75cf0633a7f3fbf1f4cfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 3 May 2023 18:19:07 +0200 Subject: [PATCH 071/102] Make conversion to int explicit in python --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 643b1d9fc..9c832f95b 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -727,7 +727,8 @@ def get_event(self, run_nr, event_id): else: pass - event_index = event_idx_ids[mask, 0][0] + # int(...) necessary to pass it to mattak + event_index = int(event_idx_ids[mask, 0][0]) dataset = self.__get_dataset_for_event(event_index) event_info = dataset.eventInfo() # returns a single eventInfo From 5c3fcb7183814b97ceb192328da5e1dbf63d4bae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Wed, 3 May 2023 18:23:56 +0200 Subject: [PATCH 072/102] Improve logger error output --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 9c832f95b..7c0c8ee84 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -541,7 +541,8 @@ def _check_for_valid_information_in_event_info(self, event_info): if (event_info.sampleRate == 0 or event_info.sampleRate is None) and self._overwrite_sampling_rate is None: self.logger.error(f"Event {event_info.eventNumber} (st {event_info.station}, run {event_info.run}) " - f"has a sampling rate of {event_info.sampleRate:.2f} GHz. Skip event...") + f"has a sampling rate of {event_info.sampleRate:.2f} GHz. Event is skipped ... 
" + f"You can avoid this by setting 'overwrite_sampling_rate' in the begin() method.") self.__invalid += 1 return False From fc51646b58eb658cfda9c69a5c4a9e27ab832d11 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= <30903175+fschlueter@users.noreply.github.com> Date: Fri, 5 May 2023 19:01:20 +0200 Subject: [PATCH 073/102] Update readRNOGDataMattak.py: Fix typo --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 7c0c8ee84..4c4b8fede 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -301,7 +301,7 @@ def begin(self, # Set verbose for mattak if "verbose" in mattak_kwargs: - vabose = mattak_kwargs.pop("verbose") + verbose = mattak_kwargs.pop("verbose") else: verbose = log_level == logging.DEBUG From 6ea10e6dbf0dcf3e039c9d72ed16b03fde3ce7cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 5 May 2023 19:13:51 +0200 Subject: [PATCH 074/102] Add init func which imports the run table. add set_selector function and use it in begin() --- .../modules/io/RNO_G/readRNOGDataMattak.py | 99 +++++++++++-------- 1 file changed, 60 insertions(+), 39 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 4c4b8fede..a61dfa8fc 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -131,7 +131,36 @@ def all_files_in_directory(mattak_dir): class readRNOGData: + + def __init__(self, run_table_path=None): + """ + Parameters + ---------- + + run_table_path: str + Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) + """ + + # Initialize run table for run selection + self.__run_table = None + if run_table_path is None: + try: + from rnog_data.runtable import RunTable + self.logger.debug("Access RunTable database ...") + try: + self.__run_table = RunTable().get_table() + except: + self.logger.warn("No connect to RunTable database could be established. " + "Runs can not be filtered.") + except ImportError: + self.logger.warn("Import of run table failed. Runs can not be filtered.! \n" + "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") + else: + import pandas + self.__run_table = pandas.read_csv(run_table_path) + + def begin(self, dirs_files, read_calibrated_data=False, @@ -140,7 +169,6 @@ def begin(self, apply_baseline_correction=True, convert_to_voltage=True, selectors=[], - run_table_path=None, run_types=["physics"], run_time_range=None, max_trigger_rate=0 * units.Hz, @@ -148,7 +176,6 @@ def begin(self, overwrite_sampling_rate=None, log_level=logging.INFO): """ - Parameters ---------- @@ -161,7 +188,7 @@ def begin(self, select_triggers: str or list(str) Names of triggers which should be selected. Convinence interface instead of passing a selector - (see "selectors" below. (Default: None) + (see "selectors" below). (Default: None) select_runs: bool If True, use information in run_table to select runs (based on run_type, run_time, trigger_rate, ...). @@ -182,10 +209,7 @@ def begin(self, selectors: list of lambdas List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. 
Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" - - run_table_path: str - Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) - + run_types: list Used to select/reject runs from information in the RNO-G RunTable. List of run_types to be used. (Default: ['physics']) @@ -230,25 +254,6 @@ def begin(self, self._overwrite_sampling_rate = overwrite_sampling_rate - # Initialize run table for run selection - self.__run_table = None - if select_runs: - if run_table_path is None: - try: - from rnog_data.runtable import RunTable - self.logger.debug("Access RunTable database ...") - try: - self.__run_table = RunTable().get_table() - except: - self.logger.error("No connect to RunTable database could be established. " - "Runs will not be filtered.") - except ImportError: - self.logger.error("Import of run table failed. You will not be able to select runs! \n" - "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") - else: - import pandas - self.__run_table = pandas.read_csv(run_table_path) - # Set parameter for run selection self.__max_trigger_rate = max_trigger_rate self.__run_types = run_types @@ -266,19 +271,7 @@ def begin(self, f"\n\tSelect runs with max. trigger rate of {max_trigger_rate / units.Hz} Hz" f"\n\tSelect runs which are between {self._time_low} - {self._time_high}") - # Initialize selectors for event filtering - if not isinstance(selectors, (list, np.ndarray)): - selectors = [selectors] - - if select_triggers is not None: - if isinstance(select_triggers, str): - selectors.append(lambda eventInfo: eventInfo.triggerType == select_triggers) - else: - for select_trigger in select_triggers: - selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) - - self._selectors = selectors - self.logger.info(f"Found {len(self._selectors)} selector(s)") + self.set_selectors(selectors, select_triggers) # Read data self._time_begin = 0 @@ -348,6 +341,34 @@ def begin(self, err = "No runs have been selected. Abort ..." self.logger.error(err) raise ValueError(err) + + + def set_selectors(self, selectors, select_triggers=None): + """ + Parameters + ---------- + + selectors: list of lambdas + List of lambda(eventInfo) -> bool to pass to mattak.Dataset.iterate to select events. + Example: trigger_selector = lambda eventInfo: eventInfo.triggerType == "FORCE" + + select_triggers: str or list(str) + Names of triggers which should be selected. Convinence interface instead of passing a selector. 
(Default: None) + """ + + # Initialize selectors for event filtering + if not isinstance(selectors, (list, np.ndarray)): + selectors = [selectors] + + if select_triggers is not None: + if isinstance(select_triggers, str): + selectors.append(lambda eventInfo: eventInfo.triggerType == select_triggers) + else: + for select_trigger in select_triggers: + selectors.append(lambda eventInfo: eventInfo.triggerType == select_trigger) + + self._selectors = selectors + self.logger.info(f"Set {len(self._selectors)} selector(s)") def __select_run(self, dataset): From 9e063cc69176c361634802f77c61e28bf6835f86 Mon Sep 17 00:00:00 2001 From: Sjoerd Bouma Date: Sun, 7 May 2023 22:33:38 +0200 Subject: [PATCH 075/102] replaced np.float with float --- NuRadioMC/EvtGen/generate_unforced.py | 2 +- NuRadioMC/EvtGen/generator.py | 4 ++-- NuRadioMC/SignalGen/ARZ/ARZ.py | 2 +- NuRadioMC/SignalProp/analyticraytracing.py | 4 ++-- NuRadioMC/SignalProp/propagation_base_class.py | 4 ++-- NuRadioMC/simulation/simulation.py | 8 ++++---- NuRadioReco/detector/antennapattern.py | 14 +++++++------- .../apps/overview_plots/trigger_properties.py | 2 +- .../SNR_study/T01_generate_events_simple.py | 4 ++-- .../cr_efficiency_analysis/helper_cr_eff.py | 4 ++-- NuRadioReco/modules/analogToDigitalConverter.py | 2 +- .../envelope_phasedarray/triggerSimulator.py | 2 +- .../modules/phasedarray/triggerSimulator.py | 4 ++-- .../test/trigger_tests/compare_to_reference.py | 2 +- 14 files changed, 29 insertions(+), 29 deletions(-) diff --git a/NuRadioMC/EvtGen/generate_unforced.py b/NuRadioMC/EvtGen/generate_unforced.py index 2c228a68f..3732d9909 100644 --- a/NuRadioMC/EvtGen/generate_unforced.py +++ b/NuRadioMC/EvtGen/generate_unforced.py @@ -586,7 +586,7 @@ def points_in_cylinder(pt1, pt2, r, q): data_sets['flavors'] = np.ones(np.sum(mask_int)) data_sets["event_ids"] = np.arange(np.sum(mask_int)) + start_event_id data_sets["n_interaction"] = np.ones(np.sum(mask_int), dtype=int) - data_sets["vertex_times"] = np.zeros(np.sum(mask_int), dtype=np.float) + data_sets["vertex_times"] = np.zeros(np.sum(mask_int), dtype=float) data_sets["interaction_type"] = inelasticities.get_ccnc(np.sum(mask_int)) data_sets["inelasticity"] = inelasticities.get_neutrino_inelasticity(np.sum(mask_int)) diff --git a/NuRadioMC/EvtGen/generator.py b/NuRadioMC/EvtGen/generator.py index 9e31dda29..735b57131 100644 --- a/NuRadioMC/EvtGen/generator.py +++ b/NuRadioMC/EvtGen/generator.py @@ -904,7 +904,7 @@ def generate_surface_muons(filename, n_events, Emin, Emax, data_sets["event_group_ids"] = np.arange(i_batch * max_n_events_batch, i_batch * max_n_events_batch + n_events_batch, dtype=int) + start_event_id data_sets["n_interaction"] = np.ones(n_events_batch, dtype=int) - data_sets["vertex_times"] = np.zeros(n_events_batch, dtype=np.float) + data_sets["vertex_times"] = np.zeros(n_events_batch, dtype=float) # generate neutrino flavors randomly @@ -1238,7 +1238,7 @@ def generate_eventlist_cylinder(filename, n_events, Emin, Emax, data_sets["event_group_ids"] = np.arange(i_batch * max_n_events_batch, i_batch * max_n_events_batch + n_events_batch) + start_event_id logger.debug("generating number of interactions") data_sets["n_interaction"] = np.ones(n_events_batch, dtype=int) - data_sets["vertex_times"] = np.zeros(n_events_batch, dtype=np.float) + data_sets["vertex_times"] = np.zeros(n_events_batch, dtype=float) # generate neutrino flavors randomly logger.debug("generating flavors") diff --git a/NuRadioMC/SignalGen/ARZ/ARZ.py b/NuRadioMC/SignalGen/ARZ/ARZ.py index 
fd3a3faa1..a9fb22d57 100644 --- a/NuRadioMC/SignalGen/ARZ/ARZ.py +++ b/NuRadioMC/SignalGen/ARZ/ARZ.py @@ -239,7 +239,7 @@ def get_dist_shower(X, z): v[0] = vperp_x v[1] = vperp_y v[2] = vperp_z -# v = np.array([vperp_x, vperp_y, vperp_z], dtype=np.float64) +# v = np.array([vperp_x, vperp_y, vperp_z], dtype=float) """ Function F_p Eq.(15) PRD paper. """ diff --git a/NuRadioMC/SignalProp/analyticraytracing.py b/NuRadioMC/SignalProp/analyticraytracing.py index 7130c09dd..5a2108dbc 100644 --- a/NuRadioMC/SignalProp/analyticraytracing.py +++ b/NuRadioMC/SignalProp/analyticraytracing.py @@ -1622,8 +1622,8 @@ def set_start_and_end_point(self, x1, x2): if(self._X2[2] < self._X1[2]): self._swap = True self.__logger.debug('swap = True') - self._X2 = np.array(x1, dtype =np.float) - self._X1 = np.array(x2, dtype =np.float) + self._X2 = np.array(x1, dtype =float) + self._X1 = np.array(x2, dtype =float) dX = self._X2 - self._X1 self._dPhi = -np.arctan2(dX[1], dX[0]) diff --git a/NuRadioMC/SignalProp/propagation_base_class.py b/NuRadioMC/SignalProp/propagation_base_class.py index bbcd1130d..5bb45d4b4 100644 --- a/NuRadioMC/SignalProp/propagation_base_class.py +++ b/NuRadioMC/SignalProp/propagation_base_class.py @@ -90,8 +90,8 @@ def set_start_and_end_point(self, x1, x2): stop point of the ray """ self.reset_solutions() - self._X1 = np.array(x1, dtype =np.float) - self._X2 = np.array(x2, dtype = np.float) + self._X1 = np.array(x1, dtype =float) + self._X2 = np.array(x2, dtype = float) if (self._n_reflections): if (self._X1[2] < self._medium.reflection or self._X2[2] < self._medium.reflection): self.__logger.error("start or stop point is below the reflective bottom layer at {:.1f}m".format( diff --git a/NuRadioMC/simulation/simulation.py b/NuRadioMC/simulation/simulation.py index f5c5530a2..0bc99a85d 100644 --- a/NuRadioMC/simulation/simulation.py +++ b/NuRadioMC/simulation/simulation.py @@ -1428,10 +1428,10 @@ def _write_output_file(self, empty=False): fout["station_{:d}".format(station_id)].attrs['Vrms'] = list(self._Vrms_per_channel[station_id].values()) fout["station_{:d}".format(station_id)].attrs['bandwidth'] = list(self._bandwidth_per_channel[station_id].values()) - fout.attrs.create("Tnoise", self._noise_temp, dtype=np.float) - fout.attrs.create("Vrms", self._Vrms, dtype=np.float) - fout.attrs.create("dt", self._dt, dtype=np.float) - fout.attrs.create("bandwidth", self._bandwidth, dtype=np.float) + fout.attrs.create("Tnoise", self._noise_temp, dtype=float) + fout.attrs.create("Vrms", self._Vrms, dtype=float) + fout.attrs.create("dt", self._dt, dtype=float) + fout.attrs.create("bandwidth", self._bandwidth, dtype=float) fout.attrs['n_samples'] = self._n_samples fout.attrs['config'] = yaml.dump(self._cfg) diff --git a/NuRadioReco/detector/antennapattern.py b/NuRadioReco/detector/antennapattern.py index 89a6d9795..19f79a50e 100644 --- a/NuRadioReco/detector/antennapattern.py +++ b/NuRadioReco/detector/antennapattern.py @@ -623,13 +623,13 @@ def parse_AERA_XML_file(path): # get frequencies and angles frequencies_node = root.find("./frequency") - frequencies = np.array(frequencies_node.text.strip().split(), dtype=np.float) * units.MHz + frequencies = np.array(frequencies_node.text.strip().split(), dtype=float) * units.MHz theta_node = root.find("./theta") - thetas = np.array(theta_node.text.strip().split(), dtype=np.float) * units.deg + thetas = np.array(theta_node.text.strip().split(), dtype=float) * units.deg phi_node = root.find("./phi") - phis = np.array(phi_node.text.strip().split(), dtype=np.float) 
* units.deg + phis = np.array(phi_node.text.strip().split(), dtype=float) * units.deg n_freqs = len(frequencies) n_angles = len(phis) @@ -650,16 +650,16 @@ def parse_AERA_XML_file(path): freq_string = "%.1f" % freq theta_amp_node = root.find("./EAHTheta_amp[@idfreq='%s']" % freq_string) - theta_amps[iFreq] = np.array(theta_amp_node.text.strip().split(), dtype=np.float) * units.m + theta_amps[iFreq] = np.array(theta_amp_node.text.strip().split(), dtype=float) * units.m theta_phase_node = root.find("./EAHTheta_phase[@idfreq='%s']" % freq_string) - theta_phases[iFreq] = np.deg2rad(np.array(theta_phase_node.text.strip().split(" "), dtype=np.float)) + theta_phases[iFreq] = np.deg2rad(np.array(theta_phase_node.text.strip().split(" "), dtype=float)) phi_amp_node = root.find("./EAHPhi_amp[@idfreq='%s']" % freq_string) - phi_amps[iFreq] = np.array(phi_amp_node.text.strip().split(), dtype=np.float) * units.m + phi_amps[iFreq] = np.array(phi_amp_node.text.strip().split(), dtype=float) * units.m phi_phase_node = root.find("./EAHPhi_phase[@idfreq='%s']" % freq_string) - phi_phases[iFreq] = np.deg2rad(np.array(phi_phase_node.text.strip().split(), dtype=np.float)) + phi_phases[iFreq] = np.deg2rad(np.array(phi_phase_node.text.strip().split(), dtype=float)) return frequencies, phis, thetas, phi_amps, phi_phases, theta_amps, theta_phases diff --git a/NuRadioReco/eventbrowser/apps/overview_plots/trigger_properties.py b/NuRadioReco/eventbrowser/apps/overview_plots/trigger_properties.py index 5c7bcf926..fbad603ad 100644 --- a/NuRadioReco/eventbrowser/apps/overview_plots/trigger_properties.py +++ b/NuRadioReco/eventbrowser/apps/overview_plots/trigger_properties.py @@ -35,7 +35,7 @@ def trigger_overview_properties(filename, evt_counter, station_id, juser_id): for setting_name in trigger.get_trigger_settings(): display_value = '{}' setting_value = trigger.get_trigger_settings()[setting_name] - if type(setting_value) in [float, np.float32, np.float64, np.float128]: + if isinstance(setting_value, float): display_value = '{:.5g}' props.append( html.Div([ diff --git a/NuRadioReco/examples/AliasPhasedArray/SNR_study/T01_generate_events_simple.py b/NuRadioReco/examples/AliasPhasedArray/SNR_study/T01_generate_events_simple.py index 9d0947086..8cc768dd2 100644 --- a/NuRadioReco/examples/AliasPhasedArray/SNR_study/T01_generate_events_simple.py +++ b/NuRadioReco/examples/AliasPhasedArray/SNR_study/T01_generate_events_simple.py @@ -47,7 +47,7 @@ vertex_angle_max = 145 * units.deg vertex_angles = np.random.uniform(vertex_angle_min, vertex_angle_max, n_events) -data_sets["yy"] = np.zeros(n_events, dtype=np.float) +data_sets["yy"] = np.zeros(n_events, dtype=float) data_sets["zz"] = z_ant + distance * np.cos(vertex_angles) data_sets["xx"] = distance * np.sin(vertex_angles) @@ -59,7 +59,7 @@ data_sets["event_group_ids"] = np.arange(n_events) data_sets["shower_ids"] = np.arange(n_events) data_sets["n_interaction"] = np.ones(n_events, dtype=int) -data_sets["vertex_times"] = np.zeros(n_events, dtype=np.float) +data_sets["vertex_times"] = np.zeros(n_events, dtype=float) data_sets["flavors"] = np.ones(n_events, dtype=int) * flavor data_sets["energies"] = np.ones(n_events, dtype=int) * energy data_sets["interaction_type"] = ['cc'] * n_events diff --git a/NuRadioReco/examples/cr_efficiency_analysis/helper_cr_eff.py b/NuRadioReco/examples/cr_efficiency_analysis/helper_cr_eff.py index 244f1a916..0545f2954 100644 --- a/NuRadioReco/examples/cr_efficiency_analysis/helper_cr_eff.py +++ 
b/NuRadioReco/examples/cr_efficiency_analysis/helper_cr_eff.py @@ -13,9 +13,9 @@ class NumpyEncoder(json.JSONEncoder): """ Special json encoder for numpy types """ def default(self, obj): - if isinstance(obj, np.integer): + if isinstance(obj, int): return int(obj) - elif isinstance(obj, np.floating): + elif isinstance(obj, float): return float(obj) elif isinstance(obj, np.ndarray): return obj.tolist() diff --git a/NuRadioReco/modules/analogToDigitalConverter.py b/NuRadioReco/modules/analogToDigitalConverter.py index 37b229054..528666cf8 100644 --- a/NuRadioReco/modules/analogToDigitalConverter.py +++ b/NuRadioReco/modules/analogToDigitalConverter.py @@ -50,7 +50,7 @@ def perfect_comparator(trace, adc_n_bits, adc_ref_voltage, mode='floor', output= digital_trace = round_to_int(digital_trace) if (output == 'voltage'): - digital_trace = lsb_voltage * digital_trace.astype(np.float) + digital_trace = lsb_voltage * digital_trace.astype(float) elif (output == 'counts'): pass else: diff --git a/NuRadioReco/modules/envelope_phasedarray/triggerSimulator.py b/NuRadioReco/modules/envelope_phasedarray/triggerSimulator.py index 0a7aad5fa..e3df099c8 100644 --- a/NuRadioReco/modules/envelope_phasedarray/triggerSimulator.py +++ b/NuRadioReco/modules/envelope_phasedarray/triggerSimulator.py @@ -102,7 +102,7 @@ def envelope_trigger(self, adc_type='perfect_floor_comparator', diode=diode) time_step = 1 / det.get_channel(station_id, channel_id)['trigger_adc_sampling_frequency'] - times = np.arange(len(trace), dtype=np.float) * time_step + times = np.arange(len(trace), dtype=float) * time_step times += channel.get_trace_start_time() else: diff --git a/NuRadioReco/modules/phasedarray/triggerSimulator.py b/NuRadioReco/modules/phasedarray/triggerSimulator.py index d5bf7c545..41f357b32 100644 --- a/NuRadioReco/modules/phasedarray/triggerSimulator.py +++ b/NuRadioReco/modules/phasedarray/triggerSimulator.py @@ -222,7 +222,7 @@ def power_sum(self, coh_sum, window, step, adc_output='voltage'): num_frames = int(np.floor((len(coh_sum) - window) / step)) if(adc_output == 'voltage'): - coh_sum_squared = (coh_sum * coh_sum).astype(np.float) + coh_sum_squared = (coh_sum * coh_sum).astype(float) elif(adc_output == 'counts'): coh_sum_squared = (coh_sum * coh_sum).astype(int) @@ -230,7 +230,7 @@ def power_sum(self, coh_sum, window, step, adc_output='voltage'): (coh_sum_squared.strides[0] * step, coh_sum_squared.strides[0])) power = np.sum(coh_sum_windowed, axis=1) - return power.astype(np.float) / window, num_frames + return power.astype(float) / window, num_frames def phase_signals(self, traces, beam_rolls): """ diff --git a/NuRadioReco/test/trigger_tests/compare_to_reference.py b/NuRadioReco/test/trigger_tests/compare_to_reference.py index 215b1aa37..1ab665832 100644 --- a/NuRadioReco/test/trigger_tests/compare_to_reference.py +++ b/NuRadioReco/test/trigger_tests/compare_to_reference.py @@ -41,7 +41,7 @@ for prop in properties: if(prop == "trigger_time"): try: - np.testing.assert_allclose(np.array(trigger_results[trigger_name][prop], dtype=np.float64), np.array(reference[trigger_name][prop], dtype=np.float64)) + np.testing.assert_allclose(np.array(trigger_results[trigger_name][prop], dtype=float), np.array(reference[trigger_name][prop], dtype=float)) except AssertionError as e: print('Property {} of trigger {} differs from reference'.format(prop, trigger_name)) print(e) From 1c77f3dd9cf6e803cd39843c9f003d68066c2671 Mon Sep 17 00:00:00 2001 From: Sjoerd Bouma Date: Sun, 7 May 2023 22:35:00 +0200 Subject: [PATCH 076/102] update 
changelog and version number --- changelog.txt | 4 ++++ pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/changelog.txt b/changelog.txt index 63f7f9742..7aeac9209 100644 --- a/changelog.txt +++ b/changelog.txt @@ -2,6 +2,10 @@ Changelog - to keep track of all relevant changes please update the categories "new features" and "bugfixes" before a pull request merge! +version 2.1.8 +bugfixes: +- replace deprecated np.float with float + version 2.1.7 new features: - add attenuation model from the 2021 measurements taken at Summit Station diff --git a/pyproject.toml b/pyproject.toml index f6b69b708..fb2eaf372 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "NuRadioMC" -version = "2.1.7" +version = "2.1.8" authors = ["Christian Glaser et al."] homepage = "https://github.com/nu-radio/NuRadioMC" documentation = "https://nu-radio.github.io/NuRadioMC/main.html" From 31376d0934badca50deb24e83cea660587e7a1be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Thu, 11 May 2023 15:43:23 +0200 Subject: [PATCH 077/102] Allow trigger types 'UNKNOWN' to be read in by the reader --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index a61dfa8fc..ca4f27a98 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -86,16 +86,19 @@ def get_time_offset(trigger_type): time_offsets = { "FORCE": 0, "LT": 250 * units.ns, - "RADIANT": 475 * units.ns + "RADIANT": 475 * units.ns, + "UNKNOWN": 0 # Due to a firmware issue at the beginning of data taking the trigger types were not properly set. } + # Should have the same time offset ?! if trigger_type.startswith("RADIANT"): trigger_type = "RADIANT" if trigger_type in time_offsets: return time_offsets[trigger_type] else: - raise KeyError(f"Unknown trigger type: {trigger_type}. Known are: FORCE, LT, RADIANT. Abort ....") + known_trigger_types = ", ".join(time_offsets.keys()) + raise KeyError(f"Unknown trigger type: {trigger_type}. Known are: {known_trigger_types}. 
Abort ....") def all_files_in_directory(mattak_dir): From 1e98fd65aa588b3b48510b6963494a9f2441cf01 Mon Sep 17 00:00:00 2001 From: Alan Coleman Date: Mon, 15 May 2023 13:30:17 -0400 Subject: [PATCH 078/102] Added getter --- NuRadioMC/SignalGen/ARZ/ARZ.py | 37 ++++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/NuRadioMC/SignalGen/ARZ/ARZ.py b/NuRadioMC/SignalGen/ARZ/ARZ.py index 4b8523200..efa1b3bd8 100644 --- a/NuRadioMC/SignalGen/ARZ/ARZ.py +++ b/NuRadioMC/SignalGen/ARZ/ARZ.py @@ -468,6 +468,38 @@ def set_interpolation_factor2(self, interp_factor): """ self._interp_factor2 = interp_factor + def get_shower_profile(self, shower_energy, shower_type, iN): + """ + Gets a charge-excess profile + + Parameters + ---------- + shower_energy: float + the energy of the shower + shower_type: string (default "HAD") + type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) + iN: int + specify shower number + + Returns + ------- + efield_trace: two arrays of floats + slant depths and charge profile amplitudes + """ + + energies = np.array([*self._library[shower_type]]) + iE = np.argmin(np.abs(energies - shower_energy)) + + rescaling_factor = shower_energy / energies[iE] + + profiles = self._library[shower_type][energies[iE]] + N_profiles = len(profiles['charge_excess']) + profile_depth = profiles['depth'] + profile_ce = profiles['charge_excess'][iN] * rescaling_factor + + return profile_depth, profile_ce + + def get_time_trace(self, shower_energy, theta, N, dt, shower_type, n_index, R, shift_for_xmax=False, same_shower=False, iN=None, output_mode='trace', maximum_angle=20 * units.deg): """ @@ -483,10 +515,6 @@ def get_time_trace(self, shower_energy, theta, N, dt, shower_type, n_index, R, s number of samples in the time domain dt: float size of one time bin in units of time - profile_depth: array of floats - shower depth values of the charge excess profile - profile_ce: array of floats - charge-excess values of the charge excess profile shower_type: string (default "HAD") type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) n_index: float (default 1.78) @@ -537,6 +565,7 @@ def get_time_trace(self, shower_energy, theta, N, dt, shower_type, n_index, R, s rescaling_factor = shower_energy / energies[iE] logger.info("shower energy of {:.3g}eV requested, closest available energy is {:.3g}eV. 
The amplitude of the charge-excess profile will be rescaled accordingly by a factor of {:.2f}".format(shower_energy / units.eV, energies[iE] / units.eV, rescaling_factor)) profiles = self._library[shower_type][energies[iE]] + N_profiles = len(profiles['charge_excess']) if(iN is None or np.isnan(iN)): From b03f3ff8e7802caf740b00c590c79b631a2b0caa Mon Sep 17 00:00:00 2001 From: Alan Coleman Date: Mon, 15 May 2023 13:34:40 -0400 Subject: [PATCH 079/102] Change log --- changelog.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/changelog.txt b/changelog.txt index 8d6735957..8f84eea2a 100644 --- a/changelog.txt +++ b/changelog.txt @@ -4,6 +4,7 @@ please update the categories "new features" and "bugfixes" before a pull request version 2.2.0-dev new features: +- added getting to query ARZ charge-excess profiles - upgrade to proposal v 7.5.0 with new NuRadioProposal interface and improved LPM treatment - add script to generate / download pre-calculated proposal tables for known detector configurations - adding default RadioPropa ice model object to medium with the feature to set a personlised object as alternative From 79cf83eb5ec131bb2378e5151e649b55de2a1c2f Mon Sep 17 00:00:00 2001 From: Alan Coleman Date: Mon, 15 May 2023 14:35:49 -0400 Subject: [PATCH 080/102] docstrings --- NuRadioMC/SignalGen/ARZ/ARZ.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/NuRadioMC/SignalGen/ARZ/ARZ.py b/NuRadioMC/SignalGen/ARZ/ARZ.py index efa1b3bd8..e860a021a 100644 --- a/NuRadioMC/SignalGen/ARZ/ARZ.py +++ b/NuRadioMC/SignalGen/ARZ/ARZ.py @@ -483,7 +483,7 @@ def get_shower_profile(self, shower_energy, shower_type, iN): Returns ------- - efield_trace: two arrays of floats + depth, excess: two arrays of floats slant depths and charge profile amplitudes """ @@ -493,7 +493,6 @@ def get_shower_profile(self, shower_energy, shower_type, iN): rescaling_factor = shower_energy / energies[iE] profiles = self._library[shower_type][energies[iE]] - N_profiles = len(profiles['charge_excess']) profile_depth = profiles['depth'] profile_ce = profiles['charge_excess'][iN] * rescaling_factor From 92be315d57d4ddf14057290fcda82edf4d8e38b8 Mon Sep 17 00:00:00 2001 From: Alan Coleman Date: Mon, 15 May 2023 15:32:30 -0400 Subject: [PATCH 081/102] more docstrings --- NuRadioMC/SignalGen/ARZ/ARZ.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/NuRadioMC/SignalGen/ARZ/ARZ.py b/NuRadioMC/SignalGen/ARZ/ARZ.py index e860a021a..5726776ec 100644 --- a/NuRadioMC/SignalGen/ARZ/ARZ.py +++ b/NuRadioMC/SignalGen/ARZ/ARZ.py @@ -63,7 +63,7 @@ def get_vector_potential( profile_ce: array of floats charge-excess values of the charge excess profile shower_type: string (default "HAD") - type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) + type of shower, either "HAD" (hadronic) or "EM" (electromagnetic) n_index: float (default 1.78) index of refraction where the shower development takes place distance: float (default 1km) @@ -477,7 +477,7 @@ def get_shower_profile(self, shower_energy, shower_type, iN): shower_energy: float the energy of the shower shower_type: string (default "HAD") - type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) + type of shower, either "HAD" (hadronic) or "EM" (electromagnetic) iN: int specify shower number @@ -515,7 +515,7 @@ def get_time_trace(self, shower_energy, theta, N, dt, shower_type, n_index, R, s dt: float size of one time bin in units of time shower_type: string (default "HAD") - 
type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) + type of shower, either "HAD" (hadronic) or "EM" (electromagnetic) n_index: float (default 1.78) index of refraction where the shower development takes place R: float (default 1km) @@ -625,7 +625,7 @@ def get_time_trace(self, shower_energy, theta, N, dt, shower_type, n_index, R, s logger.error("Tau showers are not yet implemented") raise NotImplementedError("Tau showers are not yet implemented") else: - msg = "showers of type {} are not implemented. Use 'HAD', 'EM' or 'TAU'".format(shower_type) + msg = "showers of type {} are not implemented. Use 'HAD', 'EM'".format(shower_type) logger.error(msg) raise NotImplementedError(msg) if self._use_numba: @@ -697,7 +697,7 @@ def get_vector_potential_fast(self, shower_energy, theta, N, dt, profile_depth, profile_ce: array of floats charge-excess values of the charge excess profile shower_type: string (default "HAD") - type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) + type of shower, either "HAD" (hadronic) or "EM" (electromagnetic) n_index: float (default 1.78) index of refraction where the shower development takes place distance: float (default 1km) @@ -1164,7 +1164,7 @@ def get_time_trace(self, shower_energy, theta, N, dt, shower_type, n_index, R, dt: float size of one time bin in units of time shower_type: string (default "HAD") - type of shower, either "HAD" (hadronic), "EM" (electromagnetic) or "TAU" (tau lepton induced) + type of shower, either "HAD" (hadronic) or "EM" (electromagnetic) n_index: float (default 1.78) index of refraction where the shower development takes place R: float (default 1km) From cbe2ff93500ba0c8617037046a00dc2258e822c9 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 14:08:51 +0000 Subject: [PATCH 082/102] this additional flag leads to errors on my end and does not seem to be needed --- NuRadioMC/SignalProp/CPPAnalyticRayTracing/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioMC/SignalProp/CPPAnalyticRayTracing/setup.py b/NuRadioMC/SignalProp/CPPAnalyticRayTracing/setup.py index 932437653..adc09a364 100644 --- a/NuRadioMC/SignalProp/CPPAnalyticRayTracing/setup.py +++ b/NuRadioMC/SignalProp/CPPAnalyticRayTracing/setup.py @@ -16,7 +16,7 @@ Extension('wrapper', ['wrapper.pyx'], include_dirs=[numpy.get_include(), '../../utilities/', str(os.environ['GSLDIR']) + '/include/'], library_dirs=[str(os.environ['GSLDIR']) + '/lib/'], - extra_compile_args=['-O3',"-mfpmath=sse"], + extra_compile_args=['-O3'], libraries=['gsl', 'gslcblas'], language='c++' ), From 617eceff0753fc6478f3921b9155854b52b4254b Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 14:09:34 +0000 Subject: [PATCH 083/102] remove print statement --- NuRadioMC/SignalProp/analyticraytracing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioMC/SignalProp/analyticraytracing.py b/NuRadioMC/SignalProp/analyticraytracing.py index d9e3c670a..0ab533144 100644 --- a/NuRadioMC/SignalProp/analyticraytracing.py +++ b/NuRadioMC/SignalProp/analyticraytracing.py @@ -639,7 +639,7 @@ def dt(t, C_0, frequency): mask = frequency > 0 freqs = self.__get_frequencies_for_attenuation(frequency, max_detector_freq) gamma_turn, z_turn = self.get_turning_point(self.medium.n_ice ** 2 - C_0 ** -2) - print("_use_optimized_calculation", self._use_optimized_calculation) + self.__logger.info("_use_optimized_calculation", self._use_optimized_calculation) if 
self._use_optimized_calculation: # The integration of the attenuation factor along the path with scipy.quad is inefficient. The From a57c484e9efaafa495120d33f1544c36be70cf80 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 14:09:59 +0000 Subject: [PATCH 084/102] update reference file to new noise realizations --- .../1e18_output_noise_reference.hdf5 | Bin 104304 -> 104304 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/NuRadioMC/test/SingleEvents/1e18_output_noise_reference.hdf5 b/NuRadioMC/test/SingleEvents/1e18_output_noise_reference.hdf5 index 093452baabd9803736afe32ba06098068080cf08..83cbb5536a735ae36672ff27073bbf0e39446454 100644 GIT binary patch literal 104304 zcmeFZ3p|zEw?9l#LP#pfy-=EJJGtK$ z60)p|wOA~Q6w&|L#XkSDf4}{E&pz)tXYcdA`}z17&m40+V~+VoPVZyei9Bxzn?eD zCeo97{-*ys5il`YWATgSdnbNR?FqX0+|QfIc4aa9XZ>s@s>#ox{o{_4V>*Wj2M>p( z^RYuG4svkJk_IC6%caku!N;NTed>?IkGA^rj`*MJs?W*6{gXFQ9_K_$=H&dzm^&dn zNv9I|zvTR$er}@FU-G9+nesyc&xF3IoD=47Bu!|XB$GXAGX3Y$t>&B7auWU2al(wN zhkxSmdsewmWP}{~*K(7!e0KC-%T4xw4wru|_j~sIRrr6rT`m66ZbXT3^_Q9p5smjJ z0z4e+)|hQ222T#lE!%z}KQey5oh1=Z{&G#W*q%tYGW|bG{}GSHKMdgf*-d`6 zcA}g9LwYL5e-%E(6ZjbX1^~z3r2l)9lX!?u6f-%#xzGFgtK0nge*HN<{Z_7K{=b*| z*VgvK&-{KUxia5HLop3X3T`&Eu$zA6+uonM$JsC(YQuE8hL3Lil}lfz##s8W7HSgzBKP21^T8RcY6zl&i=gWmjYIC!z!0B$$ulz;ZQ~ffGi}fXK6tDPL z{TqK$eulJB*?>;zukw@py}1T8$-d?Pzz>xSXj_r=O8*C)oAqnHaeW6#WP6$Z*=Zs( zN(IaHMIO9}-Z!^g|M&PW4jkWay{zQd=R1NQ1zmqzqW^pRg>$Fb_w%LwD$nub`f}lW zTllMxfP#PEUsf4verNcP^3B6JFl-z2tGuhf)jPkyF#X@-i}46Yr1ll*dpWyL4SQ0o z-__85h|{oG|M&Qby`)Xi;d%OVEp^g)jPrjjM@D?Nv(SOJzot2UTwg96+_FZgUajm` zzPgma7b!}ae!j22#D)cL^mocAWl-Gl*Zkk(PqdNWkm)~*;ZT_9eKRJ8qqh@Oc4C$q zIYDuP=1x#f?w<;#Oi;cFx_R19e)t5fn4pytlr}->6LkNCA;%_Y;>4JxJ>#dIl@oNr z-2XLX5?>_x@h{iu37iEc@U@wsuP12ZZz%r+&XZJRBE4_|XS)e{<2N*XB0WjRC(=9< zc+Z|N?9y-OkKyM38eje%cm5uCerdN~F8tSxJOA|6_M4a5FT>Q&ci%5Jf8?5+XG}~c z39|UdAL5nBPdg|53_{a>;_WqIzTcXkHoe#H8E-oKLK;CMEvhI{hnpUY2<6VlKBdH$cx{p0OQ z=*O$#7yrZ0NvHmgD*V2`L1m)-{MN7K_h(oApO*jMk4O98co4|{o8R*`|IZ)de@FY6 z9RDZB^`Cu!gTrF}zi&>Hqk_f1XaA3<{;@rq{IG$O>!*d2qxxh$CSPY03#(uKY`>TP zpZ9n7i%ozb^Y1~KME2iv|Ep4|{hPz&G${}n5a0G~yaivMK0x{Kh6b6=oAuI_C~)Z7 zDY2}97HoNO-Nvk(28X_0n->KXSoU~;tVCaYQD5F8azEsw3LkdJW`sD=>dRB3j3b>}zpj|IaynZDGHp|ZQ)26l{HH@!*dI1dt zv@CeLwJBh|RptLIw~n({r9i=_0E;= zJ$}5;o%=D226yFRQ+LZ#;Qq5urZF8YxIDbO#(+eFhfM-PR)qZ)tpipyitkau>`6hy zBO2`L7uVP&N`U|eK~K%T7GzhyIAq;MgGXO(*Nm*Ez!GB%lO0^|G1GF;A^SNEv|TQz z$VgE@VaLl8R;4XiF|R9SCWQu#-7CYMrxfDUcD>SrL7s5Z4awsZA;F!ctGq>m_AWDAr#+`0i98=9>BqJtpLL zh)y$e?xI55*%x2@Eeo;9+yB$!@-9q@3U0bkN`4a}7moSw*qlhC z0`;Coz%`FTRN-Eid!(WZ%k5N3wTh^)PqXPG?O-A1#l@PN5b}>)Vot80P{Hj&L=;>o z#FjK&;f(`bsIz58(Xmh}bU!;gDyv93Lvbt1da=vw=k!Z9jr5+8_Pd%O@iXM`;E z{@R5DBc%fQ{#01{@zN###f2EH$z!DVs0-biE>vsoT6YjLU^+wL1LRh6i2v%3L5k`!f{;?Gl&)?S-gekYH5O+l5Z|)wOPhP+^MR zch$1MLM-c8%fB_0u%}yF?LK4J=$a#J6H`v15Bv@a7)S2r4qTN z{aX&Gli|?}!7WcdGeNlO)ExJnf77Dlw40+&Nm441OEu@U9qT zLdT4X?vtAYgGO53cN6ko2H%haG%L|&_Z8<` zdSs~RVZO=v$b_3$`=57Dt;Ct1=6mYMkU=z*SK$7PZn#p@Ewm}H67_40MNA3%U5!OI zNfGt^qBgi;#{NodtYD^2kt9QvHZOypyBoTyMKaZ{RwBn#!x)ws8JsSJR-03pFhc6n z60oX7q4w-8w^Ye+F^-$#;0P1;*7YSt4 zQl%Ab8el~~qepTN3oLKHYcdNVL0jgQw}ZzTV4e_3XpXX0rzrG{1lMZZEthX?0M`#2(4CNP z(Y|zJT?7ek(u#tTjT=D!v?3*lu*Y-AC-ij?309cQWZhZQ0CuMi7WM8X%FT?a=gTBP zL-?jMkIyuKgzH0lGjkSv_x4=>hOl!1JDjF&+W=zEr@0JmXMyAVS@i;MNTB=eS)Ik^ z29WW#-+tec1siJ*-AEzI)zu(5&R*UCeFqhCRrj)BN&A?HDN*j-MUoH9tQtUc+nx6t zZCMaMXQhGcDG~^|m-=vMH-JK|)ZrR?7R+;i_vQo+*Cht1&paDIS|oY-H$uOx$h91e z<0N3&EelZC+W$zZ=lYv->zlHPDZS@YlTSz(t(& zalhH!5NaA2VzjLqHr##vP@mm_U-OE#ZTijxAI9NJZidwmZQ;cKfYyQb89k$nY2C1W 
zMw{a3NEPgjzvWfWK}OCj$>RG&KfPzU-e)hf3KDNM&sH4nz>MR4IxDAi1IN>uTjVcQ z!_s#fCEY%C;8BevyJ^IDMVZ^Ien+_)7$!SNHiaG7;U^KkK(ZVB`>u6AI8qHWzpLbpZQjaCKPOG=)ZeC@!1DBUmOoZWCX zd)jc`ohmr8`wQ2svJU)id6Ydxz8jwH*wEAv(FwMn-79+a@<3wQRHLEiOpqR_x_u_3 z6Lw8o=P|r851yRwGU88Uf?tNgl!Ta0Sai0tQt&_?IBCArkbBGo*4tv2&5@mON%gd{ z;GR6tconFuAIpTzPYg_#$8`c!>j`LT=Rrra;`hrBnb5jNt*YU1C%g%~XSim09%ygN zcZnhNxvLJod=v!Ey2s|I zyU`*2)asZBq@(v*ne@|PLEtBy9~|LHhpiUfUr!X!(YRf2=0V922n^jZY;~3nC*~HN zRLi2{4B8X>K#>qAf04Z>-G>g{LdDDWzoBEN<`nrI$w5#d>^ZgZ0v)C~M!#5`M#l>C z9@e5~K_G1z_hRuiI?%Q@s=g!iE2NB`iqHxKo~TDzafE(xj|1&Ngnn?sjo(xPAuv;7 z7w;uH3?{f}=2X*h<-oCT`C1`h<>9^X-BCK2aG8HneMHBF&8bIxXM}*@BC+Ew_X+;i z%|lmZC(v;%ZD&$nQBWv689O343>|DTJ$ZQ!gF#egjQNv2rGQX^xKNHO!@gEpb%f~Gx-;?KEW&_Pzsha28E0j}D z2&8Z!FyA_!j8J#jO&o3W&WxPbSFR&q5ORwHCI};aM_=wwF%182X zX9pU=*Vry2E}@o*Z?D;1+r!Mmp>=VZy1qoY4l{1|w!Oj|hi_=FO31@Tj+$Mz*V%A{ zcN?FvQYJQU$#$L^myb94_ZO`v>cz5?JFvF-6)xNxm3I7SK1N%t)(Z4t!zwpUvU^Gv z)@pNp|M)Z?SEN+w?Dk*-UuLN8g&kRFGq|MM-6 zdLCZgD4}kCkqxT!j!T|A=igdtEW(w#S!`T64ucz}=8ZqH{>8Sep#758%TOY87qXT#?JX>idOz3*N;cNJlMliRQ;WQrn z02zs|CDY7__gZq$+>**hNI&l^(jd_RDdR~7nyO6r9_#zSBBl|7Om&)~zkh)D@qC-! zn=&Dpr$k7oxe;1@IRjtJ=zu%t)dRCvG9jR|JNm+lMkvU4+8o^U0o=aT9$IcitOq|j zs+@e*2+v+DSyM^WOMcN@F^x5Zeh1HsTAq!NDfjeN4*3H#XvV;eT}<%2eEs;U$VL!V zo?SE~+X3`fy1FqdnDCIs`*z;RMtCCjT$#?*0W;o`V-{~^{^9xRUu}IbKD)rpDU$*8 z1F7d_9yK zp~J*wK^GV83ZZ~fm+Df=z7)&@!?X_%nJD05H17=;1w`gu)3*{%!K1e_Ss&GzsO>LN zI}l8Pts3J`&XlI0Cs!L?BY}z0E6vt4x3g~~g7P>1 ze=`2NI%e(g@6JZnqnnB4=h|Ua@zLc=dwM{4ZMD8RIUAcKrY=u#ZHFf|6*ootdVrI~ zzV_ZH8_T|5k{b4Fhhc#=Ti0~;fch@Mr0lcVc_w2fW4Q}mVXS`N_SAP$PFFJO8>+NiO{`vMN zt;6lGv~RAY{Xh@o)qY#L^l~;1+Fj6*+}93G7q!=~AM63+NAKlSG_rA#`mUS}Puii+ zWwq{u&K{UIO~aymdNyXBH;*wn*$(*`qot#S{!i*z0(bPX@#yYd7H7QL;eitKRXvNS zue+f?pKvy+kfJxon770D(BQz#(H<~pD0r}JN)BqdRmL=*Z-+Nt7SflAdR;hHs(8F1 z8?S!1=;bwQhh>MToyA{z;DAlbw=2Wh_*AH=@L5bdgqKurvFzx9`=wdD%(q0jnI3%! z$J=3=+=Ja21fGB6|L5Z0vC865m~$7t=dVxw=#&g+Hf^+(Or!xFm%ll&m+-U4xJVt{ zn+&wk_jTUHzRX3dOCD^8E~KXj?)GLRL-)~Ll9{PA$bD=T^A_oJD}+P zjo44|c1l+mup|87PmAt)yC=ht=;>*`*)$M)qv$YdN%-GkdBbeyWY{72#^({SUvumD z5vzxXx^VBiw`J6kBoK@4<07Zgz*f29Wi;VO@YVtLb>}RX<$knUz4EIg~_o8=a$qZgJ@ijqzSR#arcn3cJKKvl$UMI^%hBn zb4T``4T++`RdfI3{d!&aY2&4YP4pxm4;K^B|&ZdK+lC{8mOmr8mzq9g$KvTTl0pJq2U-r`1;eJ&~4b5 z%Y@Lg=AKJ#VKVIVw|{QmM1zQB<-5%kBtd{C8$U;y8g z%jsthHzHlC{|0*T~Y@d6FELq8qjg}rwaCVNgBLNkW)V-SqFnwUoQA+(s9$s z8G(l?H0XL|KOm}A2evXkitM#?Wc1&EAUU4~A*9`9GnMOLZ)!rBvlboEF6?635*j>J zkhtG0RR`Q>U%J0BrQ_?dvCh7^L_4|aVqPLr2j_W}XUpi(@zUB;QO^{Jes}%R(p2+0 znC-IJ;G#9*&k(9m*vvr#x2BWuS*8x2ZZy|nNYhc|aE9L@LmIrAJ=5c^Q5}rTSoEB} zmyY${CG5<;P~om)kJ3ibI=JZe-Jcx-=etqL7=_TI_00vgcjrmb2-;KTLl=tY5@M7?Z>D?+EzKrbRI zUwo_zldIC!hmM|yxQ`6B%m*x)^e_ti~?@;rYf5m zut_(aWB3dSBE-X2T<;)bxbX9FcOwQUO)Y1uA0vSciG%-M6B*T0hJ9Vk2|s<`m6wcc z672srrCgQZ3m-p{9Z$^5UG7FuRDDUH)x}xOh$CZNVwChZM+Rgr%)XoUf&`0S?6~OF zM@BQQONXOX8IWz{ZzW5VTOiK3HMM|@BK3ZurnU^w2wrTj=|Up*h0GWMLu6!4+b=p- zn*lq%ZM&(dLxP8I+O|wBBjeJU#!}bzGhlpw#Dxu>Bv@}~ePvBM8N1kUTzwq_qOGrx z7i}Pc^l;9Ft`wr5+EEHjk2C(p|4+vMFB}hI_;W4B@jn_5@)ySAz+VFo?H^`vPQ*?A zUcAun@&8}M2VI`1|3tjpMC#X*r<3XbUiy#i<4?n5GGOvAhR5O`Kfr(dyug3eIZeHp zKYw|H-!uTn-=zO{C4a^>Pv>Avn*D6@=AY9q6KtW_f1W?d6`cr3ToyOU`R8=bVBp&tU=dmiBhpGKvg?@GRvI6& z`WX@Tl$IpTv#1ugzFM{;btMxYAG~RCGl~plvrpejyHg9>>{g5km@-j`dTDSEp{IOC zb;2iV4c_ngu=I)^;TN5^r6ues8E%_?>Rqw57H))&$yw?$v1Y(X(=3k+m0!91h5Bpo zYrM&k#$`;5b>hmt_=*f#{CbDmcGbe0vXH3}-x;__bmfUi~)$%f1FQe5|DWka#>+VBBt8GNy|ZNCF-$Vd&o z$vE{4waIsAzNKYQP%m`MXdE>P>rct_*hP&v8H9-v?7eiTHF3Z@P`#BvG7R?!7>`Fz^Aun0OW%Y0@!>3(7nvMlR*Cmdfqry!S^~J3V>cRQx zdZq9ObgZ#l6HdQK#ksbQ547jhLw)k{ml#6FEWwr)Vz;OuTc!EY4eFu7#UZCVfR1-! 
zJ9?#M!~DpaJi4P<#80uXT6JvA&6KXCg}M~Y8n?B!Qa^9hBI-5$-+g`S?mFy)CR&z9 z3}t9D_>T@RUEk-~R(9-*KK19{|D}EF zW7AVZF8Sz|i+{aXa}R7?_|)XNOMTq^_N@i$*V?E0?sk-yKlk3PDP_tx0~=nOpK3p4 z!OpjrRQR}i(zbWE{d((lKJME8v~ltF?F)T7=ht3ST0Gx(=(NDxZT>l_%_kjy(wMtv zY3@Z=9TVH(`Gs%Ykg=i9>X-T+?{}Lxsje;c;QS#6u6%M%>X*Z}y;}X+iqyJ|{oh+P zp^R->zKq(=DR!!&A>&KJENFQq{-a{OD=N_*1Rp ze>D4ozHuLSHw+puV944#Q`~(a^1#Mz`O{LPBNtx%`|~dkdp~tPKJ30P4tlqYywT?0 z^@aDE_s=S-IsGfmJ%7Mk%cQRlc)5GpF&mr4-uadHquJZTgY&=k?mu+Hu4R{e>b-9D zZsqWr&op=V>OZ)?dGWB9yG>6mzG(6lUwV%n*Grur{@iu_MStU zJAF`r_cuH4N%a{M_Pzh|!ql>_H_a+3Y)}2H_P7mucCAP)OL})yRxeE*`uRtjFIjd+ zYF=vP`?Gy3G`B8v_0IL&lV>hSofd3Unielk4gA=%`;`?pr&42=ta)tJ zwQ2X$dn*@jT9!&Nci(3D;ki@VQh$AC>GHt-WvN~djXb?tX_5UH0`C?C>Hy`ObqIq`q`Sbr*c@)jD;cDbDvE%nRo^8~Kv6E^bZ1Tj*(IF9>AMwM#^u4cm zFL<6UEQgXoS&Rf?VBHJi&ECfy`1tPgb;5@L8@j}&v+}fdqdcAS>3Vqz9!&MMH2^mh zT^`7)K>J>vfq&>lCBi?8a+mKX^ZjSO-^}-~`xEXq8{dyk%aPv0+aQS{DVF;lUbOqd z^{oPUruD54{Kor$$(Mg^A0VUrtQ_0f_t12DS`M&QLK+}fI~J5wPyX_(Gkn?7qu_I; zXT{K!pWi2PumCC2Ta$Uj_uM4*6O-k59p8o1WF}Mo&W#< From aeab169e75b1af7d3b3ac83154396efacb8c2984 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 14:11:40 +0000 Subject: [PATCH 085/102] also replace np.complex --- NuRadioMC/simulation/simulation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioMC/simulation/simulation.py b/NuRadioMC/simulation/simulation.py index 0bc99a85d..b7bc5a12d 100644 --- a/NuRadioMC/simulation/simulation.py +++ b/NuRadioMC/simulation/simulation.py @@ -306,7 +306,7 @@ def __init__(self, inputfilename, self._amplification_per_channel[self._station_id] = {} for channel_id in range(self._det.get_number_of_channels(self._station_id)): ff = np.linspace(0, 0.5 / self._dt, 10000) - filt = np.ones_like(ff, dtype=np.complex) + filt = np.ones_like(ff, dtype=complex) for i, (name, instance, kwargs) in enumerate(self._evt.iter_modules(self._station_id)): if hasattr(instance, "get_filter"): filt *= instance.get_filter(ff, self._station_id, channel_id, self._det, **kwargs) From 0a549b27a5a94871c61c0b287d73e0374ed68b18 Mon Sep 17 00:00:00 2001 From: Sjoerd Bouma Date: Tue, 16 May 2023 17:33:50 +0200 Subject: [PATCH 086/102] re-update version number [ci skip] --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0e5e01219..7fb653fb4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "NuRadioMC" -version = "2.1.8" +version = "2.2.0-dev" authors = ["Christian Glaser et al."] homepage = "https://github.com/nu-radio/NuRadioMC" documentation = "https://nu-radio.github.io/NuRadioMC/main.html" From c83ea2a250a7904fd6302e0fad64775c94b0e4da Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 17:34:01 +0000 Subject: [PATCH 087/102] fix all occurences of np.complex --- NuRadioReco/detector/amp.py | 2 +- NuRadioReco/detector/antennapattern.py | 6 +++--- NuRadioReco/detector/filterresponse.py | 6 +++--- NuRadioReco/modules/channelGalacticNoiseAdder.py | 4 ++-- .../voltageToEfieldAnalyticConverterForNeutrinos.py | 8 ++++---- NuRadioReco/modules/voltageToEfieldConverter.py | 6 +++--- NuRadioReco/modules/voltageToEfieldConverterPerChannel.py | 2 +- NuRadioReco/utilities/bandpass_filter.py | 6 +++--- NuRadioReco/utilities/trace_utilities.py | 8 ++++---- 9 files changed, 24 insertions(+), 24 deletions(-) diff --git a/NuRadioReco/detector/amp.py b/NuRadioReco/detector/amp.py index 8133af0d6..04dadc714 100644 --- a/NuRadioReco/detector/amp.py +++ 
@@ -25,7 +25,7 @@ def get_amp_response(frequencies, amp_name):
     get_phase = intp.interp1d(freqs2, np.unwrap(phase))
     get_linmag = intp.interp1d(freqs, linmag)
 
-    amp_response = np.zeros_like(frequencies, dtype=np.complex)
+    amp_response = np.zeros_like(frequencies, dtype=complex)
     mask = (frequencies > max(freqs.min(), freqs2.min())) & (frequencies < min(freqs.max(), freqs2.max()))
     amp_response[mask] = get_linmag(frequencies[mask]) * np.exp(1j * get_phase(frequencies[mask]))
     return amp_response
diff --git a/NuRadioReco/detector/antennapattern.py b/NuRadioReco/detector/antennapattern.py
index 19f79a50e..2452428e1 100644
--- a/NuRadioReco/detector/antennapattern.py
+++ b/NuRadioReco/detector/antennapattern.py
@@ -64,7 +64,7 @@ def interpolate_linear_vectorized(x, x0, x1, y0, y1, interpolation_method='compl
     """
     x = np.array(x)
     mask = x0 != x1
-    result = np.zeros_like(x, dtype=np.complex)
+    result = np.zeros_like(x, dtype=complex)
     denominator = x1 - x0
     if interpolation_method == 'complex':
         result[mask] = y0[mask] + (y1[mask] - y0[mask]) * (x[mask] - x0[mask]) / denominator[mask]
@@ -1059,8 +1059,8 @@ def get_antenna_response_vectorized(self, freq, zenith, azimuth, orientation_the
             of the same length as the frequency input
         """
         if self._notfound:
-            VEL = {'theta': np.ones(len(freq), dtype=np.complex),
-                   'phi': np.ones(len(freq), dtype=np.complex)}
+            VEL = {'theta': np.ones(len(freq), dtype=complex),
+                   'phi': np.ones(len(freq), dtype=complex)}
             return VEL
 
         if isinstance(freq, (float, int)):
diff --git a/NuRadioReco/detector/filterresponse.py b/NuRadioReco/detector/filterresponse.py
index 66dbd1d79..58008e251 100644
--- a/NuRadioReco/detector/filterresponse.py
+++ b/NuRadioReco/detector/filterresponse.py
@@ -24,7 +24,7 @@ def get_filter_response_mini_circuits(frequencies, filter_name):
 
     get_S21 = intp.interp1d(ff, S21)
 
-    response = np.zeros_like(frequencies, dtype=np.complex)
+    response = np.zeros_like(frequencies, dtype=complex)
     mask = (frequencies > ff.min()) & (frequencies < ff.max())
     response[mask] = get_S21(frequencies[mask])
     return response
@@ -54,7 +54,7 @@ def get_filter_response_mini_circuits2(frequencies, filter_name):
     phase2 = -2 * np.pi * np.cumsum(get_group_delay(fff2) * df)
     get_phase = intp.interp1d(fff2, phase2)
 
-    response = np.zeros_like(frequencies, dtype=np.complex)
+    response = np.zeros_like(frequencies, dtype=complex)
     mask = (frequencies > max(ff.min(), ff2.min())) & (frequencies < min(ff.max(), ff2.max()))
     response[mask] = get_insertion_loss(frequencies[mask]) * np.exp(1j * get_phase(frequencies[mask]))
     return response
@@ -83,7 +83,7 @@ def get_filter_response(frequencies, filter_name):
     get_phase = intp.interp1d(ff2, np.unwrap(phase))
     get_insertion_loss = intp.interp1d(ff, insertion_loss)
 
-    response = np.zeros_like(frequencies, dtype=np.complex)
+    response = np.zeros_like(frequencies, dtype=complex)
     mask = (frequencies > max(ff.min(), ff2.min())) & (frequencies < min(ff.max(), ff2.max()))
     response[mask] = get_insertion_loss(frequencies[mask]) * np.exp(1j * get_phase(frequencies[mask]))
     return response
diff --git a/NuRadioReco/modules/channelGalacticNoiseAdder.py b/NuRadioReco/modules/channelGalacticNoiseAdder.py
index 0638fb423..9afafc989 100644
--- a/NuRadioReco/modules/channelGalacticNoiseAdder.py
+++ b/NuRadioReco/modules/channelGalacticNoiseAdder.py
@@ -122,7 +122,7 @@ def run(
             passband_filter = (freqs > passband[0]) & (freqs < passband[1])
             noise_spec_sum = np.zeros_like(channel.get_frequency_spectrum())
             flux_sum = np.zeros(freqs[passband_filter].shape)
-            efield_sum = np.zeros((3, freqs.shape[0]), dtype=np.complex)
+            efield_sum = np.zeros((3, freqs.shape[0]), dtype=complex)
             if self.__debug:
                 plt.close('all')
                 fig = plt.figure(figsize=(12, 8))
@@ -177,7 +177,7 @@ def run(
                 ax_3.plot(freqs[passband_filter] / units.MHz, E / (units.V / units.m), c='k', alpha=.02)
 
             # assign random phases and polarizations to electric field
-            noise_spectrum = np.zeros((3, freqs.shape[0]), dtype=np.complex)
+            noise_spectrum = np.zeros((3, freqs.shape[0]), dtype=complex)
             phases = np.random.uniform(0, 2. * np.pi, len(S))
             polarizations = np.random.uniform(0, 2. * np.pi, len(S))
 
diff --git a/NuRadioReco/modules/neutrinoDirectionReconstructor/voltageToEfieldAnalyticConverterForNeutrinos.py b/NuRadioReco/modules/neutrinoDirectionReconstructor/voltageToEfieldAnalyticConverterForNeutrinos.py
index e672b51d6..07f7daa19 100644
--- a/NuRadioReco/modules/neutrinoDirectionReconstructor/voltageToEfieldAnalyticConverterForNeutrinos.py
+++ b/NuRadioReco/modules/neutrinoDirectionReconstructor/voltageToEfieldAnalyticConverterForNeutrinos.py
@@ -187,14 +187,14 @@ def minimizer(params, minimizer=True):
             order = 5
             b, a = signal.butter(order, passband_high[use_channels[iA]], 'bandpass', analog=True)
             w, h = signal.freqs(b, a, ff[mask])
-            f = np.zeros_like(ff, dtype=np.complex)
+            f = np.zeros_like(ff, dtype=complex)
             f[mask] = h
             trace_spectrum *= f
 
             order = 10
             b, a = signal.butter(order, passband_low[use_channels[iA]], 'bandpass', analog=True)
             w, h = signal.freqs(b, a, ff[mask])
-            f = np.zeros_like(ff, dtype=np.complex)
+            f = np.zeros_like(ff, dtype=complex)
             f[mask] = h
             trace_spectrum *= f
 
@@ -352,8 +352,8 @@ def plotSimulatedVersusReconstructedTraces(nu_zenith, nu_azimuth, shower_energy,
     travel_distance = np.zeros((n_antennas, maxNumRayTracingSolPerChan))
     attenuation = np.zeros((n_antennas, maxNumRayTracingSolPerChan, len(ff)))
     focusing = np.zeros((n_antennas, maxNumRayTracingSolPerChan, 1))
-    reflection_coefficients_theta = np.ones((n_antennas, maxNumRayTracingSolPerChan), dtype=np.complex)
-    reflection_coefficients_phi = np.ones((n_antennas, maxNumRayTracingSolPerChan), dtype=np.complex)
+    reflection_coefficients_theta = np.ones((n_antennas, maxNumRayTracingSolPerChan), dtype=complex)
+    reflection_coefficients_phi = np.ones((n_antennas, maxNumRayTracingSolPerChan), dtype=complex)
     travel_time_min = float('inf')
     for iA, position in enumerate(antenna_positions):
         r = ray.ray_tracing(icemodel, attenuation_model=attenuation_model, n_frequencies_integration=25, n_reflections=n_reflections)
diff --git a/NuRadioReco/modules/voltageToEfieldConverter.py b/NuRadioReco/modules/voltageToEfieldConverter.py
index 2b66efa12..df6144a48 100644
--- a/NuRadioReco/modules/voltageToEfieldConverter.py
+++ b/NuRadioReco/modules/voltageToEfieldConverter.py
@@ -77,7 +77,7 @@ def get_array_of_channels(station, use_channels, det, zenith, azimuth,
     for iCh, trace in enumerate(traces):
         V_timedomain[iCh] = trace.get_trace()
     frequencies = traces[0].get_frequencies()  # assumes that all channels have the same sampling rate
-    V = np.zeros((len(use_channels), len(frequencies)), dtype=np.complex)
+    V = np.zeros((len(use_channels), len(frequencies)), dtype=complex)
     for iCh, trace in enumerate(traces):
         V[iCh] = trace.get_frequency_spectrum()
 
@@ -170,7 +170,7 @@ def run(self, evt, station, det, use_channels=None, use_MC_direction=False, forc
         E1[mask] = (V[0] * efield_antenna_factor[-1][1] - V[-1] * efield_antenna_factor[0][1])[mask] / denom[mask]
         E2[mask] = (V[-1] - efield_antenna_factor[-1][0] * E1)[mask] / efield_antenna_factor[-1][1][mask]
efield_antenna_factor[-1][0] * E1)[mask] / efield_antenna_factor[-1][1][mask] # solve it in a vectorized way - efield3_f = np.zeros((2, n_frequencies), dtype=np.complex) + efield3_f = np.zeros((2, n_frequencies), dtype=complex) if force_Polarization == 'eTheta': efield3_f[:1, mask] = np.moveaxis(stacked_lstsq(np.moveaxis(efield_antenna_factor[:, 0, mask], 1, 0)[:, :, np.newaxis], np.moveaxis(V[:, mask], 1, 0)), 0, 1) elif force_Polarization == 'ePhi': @@ -178,7 +178,7 @@ def run(self, evt, station, det, use_channels=None, use_MC_direction=False, forc else: efield3_f[:, mask] = np.moveaxis(stacked_lstsq(np.moveaxis(efield_antenna_factor[:, :, mask], 2, 0), np.moveaxis(V[:, mask], 1, 0)), 0, 1) # add eR direction - efield3_f = np.array([np.zeros_like(efield3_f[0], dtype=np.complex), + efield3_f = np.array([np.zeros_like(efield3_f[0], dtype=complex), efield3_f[0], efield3_f[1]]) diff --git a/NuRadioReco/modules/voltageToEfieldConverterPerChannel.py b/NuRadioReco/modules/voltageToEfieldConverterPerChannel.py index d83319e16..bdc79caf8 100644 --- a/NuRadioReco/modules/voltageToEfieldConverterPerChannel.py +++ b/NuRadioReco/modules/voltageToEfieldConverterPerChannel.py @@ -65,7 +65,7 @@ def run(self, evt, station, det, pol=0): trace = channel.get_frequency_spectrum() mask1 = np.abs(efield_antenna_factor[iCh][0]) != 0 mask2 = np.abs(efield_antenna_factor[iCh][1]) != 0 - efield_spectrum = np.zeros((3, len(trace)), dtype=np.complex) + efield_spectrum = np.zeros((3, len(trace)), dtype=complex) efield_spectrum[1][mask1] = (1.0 - pol) ** 2 * trace[mask1] / efield_antenna_factor[iCh][0][mask1] efield_spectrum[2][mask2] = pol ** 2 * trace[mask2] / efield_antenna_factor[iCh][1][mask2] efield.set_frequency_spectrum(efield_spectrum, sampling_rate) diff --git a/NuRadioReco/utilities/bandpass_filter.py b/NuRadioReco/utilities/bandpass_filter.py index d02035e8a..b91700b55 100644 --- a/NuRadioReco/utilities/bandpass_filter.py +++ b/NuRadioReco/utilities/bandpass_filter.py @@ -48,21 +48,21 @@ def get_filter_response(frequencies, passband, filter_type, order, rp=None): f[np.where(frequencies > passband[1])] = 0. 
return f elif (filter_type == 'butter'): - f = np.zeros_like(frequencies, dtype=np.complex) + f = np.zeros_like(frequencies, dtype=complex) mask = frequencies > 0 b, a = scipy.signal.butter(order, *scipy_args, analog=True) w, h = scipy.signal.freqs(b, a, frequencies[mask]) f[mask] = h return f elif (filter_type == 'butterabs'): - f = np.zeros_like(frequencies, dtype=np.complex) + f = np.zeros_like(frequencies, dtype=complex) mask = frequencies > 0 b, a = scipy.signal.butter(order, *scipy_args, analog=True) w, h = scipy.signal.freqs(b, a, frequencies[mask]) f[mask] = h return np.abs(f) elif(filter_type == 'cheby1'): - f = np.zeros_like(frequencies, dtype=np.complex) + f = np.zeros_like(frequencies, dtype=complex) mask = frequencies > 0 b, a = scipy.signal.cheby1(order, rp, *scipy_args, analog=True) w, h = scipy.signal.freqs(b, a, frequencies[mask]) diff --git a/NuRadioReco/utilities/trace_utilities.py b/NuRadioReco/utilities/trace_utilities.py index ca0fe352d..57bc82fb4 100644 --- a/NuRadioReco/utilities/trace_utilities.py +++ b/NuRadioReco/utilities/trace_utilities.py @@ -32,7 +32,7 @@ def get_efield_antenna_factor(station, frequencies, channels, detector, zenith, antenna_pattern_provider: AntennaPatternProvider """ n_ice = ice.get_refractive_index(-0.01, detector.get_site(station.get_id())) - efield_antenna_factor = np.zeros((len(channels), 2, len(frequencies)), dtype=np.complex) # from antenna model in e_theta, e_phi + efield_antenna_factor = np.zeros((len(channels), 2, len(frequencies)), dtype=complex) # from antenna model in e_theta, e_phi for iCh, channel_id in enumerate(channels): zenith_antenna = zenith t_theta = 1. @@ -88,12 +88,12 @@ def get_channel_voltage_from_efield(station, electric_field, channels, detector, spectrum = electric_field.get_frequency_spectrum() efield_antenna_factor = get_efield_antenna_factor(station, frequencies, channels, detector, zenith, azimuth, antenna_pattern_provider) if return_spectrum: - voltage_spectrum = np.zeros((len(channels), len(frequencies)), dtype=np.complex) + voltage_spectrum = np.zeros((len(channels), len(frequencies)), dtype=complex) for i_ch, ch in enumerate(channels): voltage_spectrum[i_ch] = np.sum(efield_antenna_factor[i_ch] * np.array([spectrum[1], spectrum[2]]), axis=0) return voltage_spectrum else: - voltage_trace = np.zeros((len(channels), 2 * (len(frequencies) - 1)), dtype=np.complex) + voltage_trace = np.zeros((len(channels), 2 * (len(frequencies) - 1)), dtype=complex) for i_ch, ch in enumerate(channels): voltage_trace[i_ch] = fft.freq2time(np.sum(efield_antenna_factor[i_ch] * np.array([spectrum[1], spectrum[2]]), axis=0), electric_field.get_sampling_rate()) return np.real(voltage_trace) @@ -218,7 +218,7 @@ def apply_butterworth(spectrum, frequencies, passband, order=8): The filtered spectrum """ - f = np.zeros_like(frequencies, dtype=np.complex) + f = np.zeros_like(frequencies, dtype=complex) mask = frequencies > 0 b, a = scipy.signal.butter(order, passband, 'bandpass', analog=True) w, h = scipy.signal.freqs(b, a, frequencies[mask]) From 7a0cf4e55b530147525282f64b92b932ec278e2e Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 17:35:49 +0000 Subject: [PATCH 088/102] also change np.bool --- NuRadioMC/EvtGen/generate_unforced.py | 2 +- NuRadioMC/EvtGen/generator.py | 2 +- .../A05visualization.py | 2 +- .../scripts/T05visualize_sim_output.py | 12 ++++---- NuRadioMC/simulation/simulation.py | 28 +++++++++---------- NuRadioMC/utilities/Veff.py | 16 +++++------ NuRadioMC/utilities/merge_hdf5.py | 2 +- 
.../electricFieldSignalReconstructor.py | 4 +-- .../modules/trigger/highLowThreshold.py | 4 +-- 9 files changed, 36 insertions(+), 36 deletions(-) diff --git a/NuRadioMC/EvtGen/generate_unforced.py b/NuRadioMC/EvtGen/generate_unforced.py index 3732d9909..718f3fe0f 100644 --- a/NuRadioMC/EvtGen/generate_unforced.py +++ b/NuRadioMC/EvtGen/generate_unforced.py @@ -328,7 +328,7 @@ def points_in_cylinder(pt1, pt2, r, q): 'flavors': [], 'energies': []} # calculate rotation matrix to transform position on area to 3D - mask_int = np.zeros_like(mask, dtype=np.bool) + mask_int = np.zeros_like(mask, dtype=bool) t0 = time.perf_counter() n_cylinder = 0 for j, i in enumerate(np.arange(n_events, dtype=int)[mask]): diff --git a/NuRadioMC/EvtGen/generator.py b/NuRadioMC/EvtGen/generator.py index 04e027021..ae68676dc 100644 --- a/NuRadioMC/EvtGen/generator.py +++ b/NuRadioMC/EvtGen/generator.py @@ -928,7 +928,7 @@ def generate_surface_muons(filename, n_events, Emin, Emax, if('fiducial_rmax' in attributes): mask_phi = mask_arrival_azimuth(data_sets, attributes['fiducial_rmax']) # this currently only works for cylindrical volumes else: - mask_phi = np.ones(len(data_sets["event_group_ids"]), dtype=np.bool) + mask_phi = np.ones(len(data_sets["event_group_ids"]), dtype=bool) # TODO: combine with `get_intersection_volume_neutrino` function for iE, event_id in enumerate(data_sets["event_group_ids"]): if not mask_phi[iE]: diff --git a/NuRadioMC/examples/03_station_coincidences/A05visualization.py b/NuRadioMC/examples/03_station_coincidences/A05visualization.py index 99095c5c0..e69c6f2d5 100644 --- a/NuRadioMC/examples/03_station_coincidences/A05visualization.py +++ b/NuRadioMC/examples/03_station_coincidences/A05visualization.py @@ -79,7 +79,7 @@ x2 = det.get_relative_position(101, iC) if(plot): ax.plot([x2[0]], [x2[1]], [x2[2]], 'ko') - if(j != 0 and (~(np.array(triggered_deep[iE], dtype=np.bool) & mask))[j]): + if(j != 0 and (~(np.array(triggered_deep[iE], dtype=bool) & mask))[j]): continue vertex = np.array([fin['xx'][iE], fin['yy'][iE], fin['zz'][iE]]) # print(fin.keys()) diff --git a/NuRadioMC/simulation/scripts/T05visualize_sim_output.py b/NuRadioMC/simulation/scripts/T05visualize_sim_output.py index a1c046cbf..9585974db 100644 --- a/NuRadioMC/simulation/scripts/T05visualize_sim_output.py +++ b/NuRadioMC/simulation/scripts/T05visualize_sim_output.py @@ -42,14 +42,14 @@ plot_folder = os.path.join(dirname, 'plots', filename, args.trigger_name[0]) if(not os.path.exists(plot_folder)): os.makedirs(plot_folder) - triggered = np.zeros(len(fin['multiple_triggers'][:, 0]), dtype=np.bool) + triggered = np.zeros(len(fin['multiple_triggers'][:, 0]), dtype=bool) for trigger in args.trigger_name[1:]: iTrigger = np.squeeze(np.argwhere(fin.attrs['trigger_names'] == trigger)) - triggered = triggered | np.array(fin['multiple_triggers'][:, iTrigger], dtype=np.bool) + triggered = triggered | np.array(fin['multiple_triggers'][:, iTrigger], dtype=bool) else: trigger_name = args.trigger_name[0] iTrigger = np.argwhere(fin.attrs['trigger_names'] == trigger_name) - triggered = np.array(fin['multiple_triggers'][:, iTrigger], dtype=np.bool) + triggered = np.array(fin['multiple_triggers'][:, iTrigger], dtype=bool) print("\tyou selected '{}'".format(trigger_name)) plot_folder = os.path.join(dirname, 'plots', filename, trigger_name) if(not os.path.exists(plot_folder)): @@ -147,14 +147,14 @@ def a(theta): if(len(args.trigger_name) > 1): print("trigger {} selected which is a combination of {}".format(args.trigger_name[0], 
args.trigger_name[1:])) trigger_name = args.trigger_name[0] - triggered = np.zeros(len(station['multiple_triggers'][:, 0]), dtype=np.bool) + triggered = np.zeros(len(station['multiple_triggers'][:, 0]), dtype=bool) for trigger in args.trigger_name[1:]: iTrigger = np.squeeze(np.argwhere(fin.attrs['trigger_names'] == trigger)) - triggered = triggered | np.array(station['multiple_triggers'][:, iTrigger], dtype=np.bool) + triggered = triggered | np.array(station['multiple_triggers'][:, iTrigger], dtype=bool) else: trigger_name = args.trigger_name[0] iTrigger = np.argwhere(fin.attrs['trigger_names'] == trigger_name) - triggered = np.array(station['multiple_triggers'][:, iTrigger], dtype=np.bool) + triggered = np.array(station['multiple_triggers'][:, iTrigger], dtype=bool) print("\tyou selected '{}'".format(trigger_name)) ########################### diff --git a/NuRadioMC/simulation/simulation.py b/NuRadioMC/simulation/simulation.py index a7a5fdebb..42a1d19fa 100644 --- a/NuRadioMC/simulation/simulation.py +++ b/NuRadioMC/simulation/simulation.py @@ -1150,12 +1150,12 @@ def _calculate_signal_properties(self): def _create_empty_multiple_triggers(self): if 'trigger_names' not in self._mout_attrs: self._mout_attrs['trigger_names'] = np.array([]) - self._mout['multiple_triggers'] = np.zeros((self._n_showers, 1), dtype=np.bool) + self._mout['multiple_triggers'] = np.zeros((self._n_showers, 1), dtype=bool) for station_id in self._station_ids: sg = self._mout_groups[station_id] n_showers = sg['launch_vectors'].shape[0] - sg['multiple_triggers'] = np.zeros((n_showers, 1), dtype=np.bool) - sg['triggered'] = np.zeros(n_showers, dtype=np.bool) + sg['multiple_triggers'] = np.zeros((n_showers, 1), dtype=bool) + sg['triggered'] = np.zeros(n_showers, dtype=bool) def _create_trigger_structures(self): @@ -1170,13 +1170,13 @@ def _create_trigger_structures(self): # simulated triggers is unknown at the beginning. 
So we check if the key already exists and if not, # we first create this data structure if 'multiple_triggers' not in self._mout: - self._mout['multiple_triggers'] = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=np.bool) + self._mout['multiple_triggers'] = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=bool) self._mout['trigger_times'] = np.nan * np.zeros_like(self._mout['multiple_triggers'], dtype=float) # for station_id in self._station_ids: # sg = self._mout_groups[station_id] -# sg['multiple_triggers'] = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=np.bool) +# sg['multiple_triggers'] = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=bool) elif extend_array: - tmp = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=np.bool) + tmp = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=bool) nx, ny = self._mout['multiple_triggers'].shape tmp[:, 0:ny] = self._mout['multiple_triggers'] self._mout['multiple_triggers'] = tmp @@ -1186,7 +1186,7 @@ def _create_trigger_structures(self): self._mout['trigger_times'] = tmp_t # for station_id in self._station_ids: # sg = self._mout_groups[station_id] -# tmp = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=np.bool) +# tmp = np.zeros((self._n_showers, len(self._mout_attrs['trigger_names'])), dtype=bool) # nx, ny = sg['multiple_triggers'].shape # tmp[:, 0:ny] = sg['multiple_triggers'] # sg['multiple_triggers'] = tmp @@ -1199,10 +1199,10 @@ def _save_triggers_to_hdf5(self, sg, local_shower_index, global_shower_index): # the information fo the current station and event group n_showers = sg['launch_vectors'].shape[0] if 'multiple_triggers' not in sg: - sg['multiple_triggers'] = np.zeros((n_showers, len(self._mout_attrs['trigger_names'])), dtype=np.bool) + sg['multiple_triggers'] = np.zeros((n_showers, len(self._mout_attrs['trigger_names'])), dtype=bool) sg['trigger_times'] = np.nan * np.zeros_like(sg['multiple_triggers'], dtype=float) elif extend_array: - tmp = np.zeros((n_showers, len(self._mout_attrs['trigger_names'])), dtype=np.bool) + tmp = np.zeros((n_showers, len(self._mout_attrs['trigger_names'])), dtype=bool) nx, ny = sg['multiple_triggers'].shape tmp[:, 0:ny] = sg['multiple_triggers'] sg['multiple_triggers'] = tmp @@ -1213,7 +1213,7 @@ def _save_triggers_to_hdf5(self, sg, local_shower_index, global_shower_index): self._output_event_group_ids[self._station_id].append(self._evt.get_run_number()) self._output_sub_event_ids[self._station_id].append(self._evt.get_id()) - multiple_triggers = np.zeros(len(self._mout_attrs['trigger_names']), dtype=np.bool) + multiple_triggers = np.zeros(len(self._mout_attrs['trigger_names']), dtype=bool) trigger_times = np.nan*np.zeros_like(multiple_triggers) for iT, trigger_name in enumerate(self._mout_attrs['trigger_names']): if self._station.has_trigger(trigger_name): @@ -1262,8 +1262,8 @@ def _create_meta_output_datastructures(self): self._mout = {} self._mout_attributes = {} self._mout['weights'] = np.zeros(self._n_showers) - self._mout['triggered'] = np.zeros(self._n_showers, dtype=np.bool) -# self._mout['multiple_triggers'] = np.zeros((self._n_showers, self._number_of_triggers), dtype=np.bool) + self._mout['triggered'] = np.zeros(self._n_showers, dtype=bool) +# self._mout['multiple_triggers'] = np.zeros((self._n_showers, self._number_of_triggers), dtype=bool) self._mout_attributes['trigger_names'] = None self._amplitudes = {} 
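The simulation.py hunks above all reuse one pattern: the (n_showers, n_triggers) boolean mask is grown whenever a new trigger name appears during the run, keeping the columns already recorded. A standalone sketch of that pattern with illustrative names (extend_trigger_mask is not part of NuRadioMC):

import numpy as np

def extend_trigger_mask(mask, n_triggers_new):
    # grow a (n_showers, n_triggers) boolean mask; new columns default to "not triggered"
    n_showers, n_triggers_old = mask.shape
    tmp = np.zeros((n_showers, n_triggers_new), dtype=bool)
    tmp[:, :n_triggers_old] = mask
    return tmp

mask = np.zeros((5, 1), dtype=bool)          # one trigger defined at the start of the run
mask[2, 0] = True
mask = extend_trigger_mask(mask, 3)          # two more trigger names were added later
assert mask.shape == (5, 3) and mask[2, 0]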
self._amplitudes_envelope = {} @@ -1289,7 +1289,7 @@ def _create_meta_output_datastructures(self): def _create_station_output_structure(self, n_showers, n_antennas): nS = self._raytracer.get_number_of_raytracing_solutions() # number of possible ray-tracing solutions sg = {} - sg['triggered'] = np.zeros(n_showers, dtype=np.bool) + sg['triggered'] = np.zeros(n_showers, dtype=bool) # we need the reference to the shower id to be able to find the correct shower in the upper level hdf5 file sg['shower_id'] = np.zeros(n_showers, dtype=int) * -1 sg['event_id_per_shower'] = np.zeros(n_showers, dtype=int) * -1 @@ -1424,7 +1424,7 @@ def _write_output_file(self, empty=False): # the multiple triggeres 2d array might have different number of entries per event # because the number of different triggers can increase dynamically # therefore we first create an array with the right size and then fill it - tmp = np.zeros((n_events_for_station, n_triggers), dtype=np.bool) + tmp = np.zeros((n_events_for_station, n_triggers), dtype=bool) for iE, values in enumerate(self._output_multiple_triggers_station[station_id]): tmp[iE] = values sg['multiple_triggers_per_event'] = tmp diff --git a/NuRadioMC/utilities/Veff.py b/NuRadioMC/utilities/Veff.py index 9ee5eaa14..00aecb933 100644 --- a/NuRadioMC/utilities/Veff.py +++ b/NuRadioMC/utilities/Veff.py @@ -335,35 +335,35 @@ def get_Veff_Aeff_single( return out for iT, trigger_name in enumerate(trigger_names): - triggered = np.array(fin['multiple_triggers'][:, iT], dtype=np.bool) + triggered = np.array(fin['multiple_triggers'][:, iT], dtype=bool) triggered = remove_duplicate_triggers(triggered, fin['event_group_ids']) out[veff_aeff][trigger_name] = get_veff_output(volume_proj_area, np.sum(weights[triggered]), n_events) for trigger_name, values in iteritems(trigger_combinations): indiv_triggers = values['triggers'] - triggered = np.zeros_like(fin['multiple_triggers'][:, 0], dtype=np.bool) + triggered = np.zeros_like(fin['multiple_triggers'][:, 0], dtype=bool) if isinstance(indiv_triggers, str): triggered = triggered | \ - np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_triggers]], dtype=np.bool) + np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_triggers]], dtype=bool) else: for indiv_trigger in indiv_triggers: triggered = triggered | \ - np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_trigger]], dtype=np.bool) + np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_trigger]], dtype=bool) if 'triggerAND' in values: triggered = triggered & \ - np.array(fin['multiple_triggers'][:, trigger_names_dict[values['triggerAND']]], dtype=np.bool) + np.array(fin['multiple_triggers'][:, trigger_names_dict[values['triggerAND']]], dtype=bool) if 'notriggers' in values: indiv_triggers = values['notriggers'] if(isinstance(indiv_triggers, str)): triggered = triggered & \ - ~np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_triggers]], dtype=np.bool) + ~np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_triggers]], dtype=bool) else: for indiv_trigger in indiv_triggers: triggered = triggered & \ - ~np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_trigger]], dtype=np.bool) + ~np.array(fin['multiple_triggers'][:, trigger_names_dict[indiv_trigger]], dtype=bool) if 'min_sigma' in values.keys(): if isinstance(values['min_sigma'], list): @@ -396,7 +396,7 @@ def get_Veff_Aeff_single( As = np.array(fin['max_amp_ray_solution']) max_amps = np.argmax(As[:, values['ray_channel']], axis=-1) sol = 
np.array(fin['ray_tracing_solution_type']) - mask = np.array([sol[i, values['ray_channel'], max_amps[i]] == values['ray_solution'] for i in range(len(max_amps))], dtype=np.bool) + mask = np.array([sol[i, values['ray_channel'], max_amps[i]] == values['ray_solution'] for i in range(len(max_amps))], dtype=bool) triggered = triggered & mask if 'n_reflections' in values.keys(): diff --git a/NuRadioMC/utilities/merge_hdf5.py b/NuRadioMC/utilities/merge_hdf5.py index cb31765d9..464f50733 100644 --- a/NuRadioMC/utilities/merge_hdf5.py +++ b/NuRadioMC/utilities/merge_hdf5.py @@ -266,7 +266,7 @@ def merge2(filenames, output_filename): # try: input_files = np.array(sorted(glob.glob(filename + '.part????'))) input_files = np.append(input_files, np.array(sorted(glob.glob(filename + '.part??????')))) - mask = np.array([os.path.getsize(x) > 1000 for x in input_files], dtype=np.bool) + mask = np.array([os.path.getsize(x) > 1000 for x in input_files], dtype=bool) if(np.sum(~mask)): logger.warning("{:d} files were deselected because their filesize was to small".format(np.sum(~mask))) input_args.append({'filenames': input_files[mask], 'output_filename': output_filename}) diff --git a/NuRadioReco/modules/electricFieldSignalReconstructor.py b/NuRadioReco/modules/electricFieldSignalReconstructor.py index 94252131b..9f4089d2b 100644 --- a/NuRadioReco/modules/electricFieldSignalReconstructor.py +++ b/NuRadioReco/modules/electricFieldSignalReconstructor.py @@ -98,9 +98,9 @@ def run(self, evt, station, det, debug=False): times = electric_field.get_times() mask_signal_window = (times > (signal_time - self.__signal_window_pre)) & (times < (signal_time + self.__signal_window_post)) - mask_noise_window = np.zeros_like(mask_signal_window, dtype=np.bool) + mask_noise_window = np.zeros_like(mask_signal_window, dtype=bool) if(self.__noise_window > 0): - mask_noise_window[int(np.round((-self.__noise_window - 141.) * electric_field.get_sampling_rate())):int(np.round(-141. * electric_field.get_sampling_rate()))] = np.ones(int(np.round(self.__noise_window * electric_field.get_sampling_rate())), dtype=np.bool) # the last n bins + mask_noise_window[int(np.round((-self.__noise_window - 141.) * electric_field.get_sampling_rate())):int(np.round(-141. 
* electric_field.get_sampling_rate()))] = np.ones(int(np.round(self.__noise_window * electric_field.get_sampling_rate())), dtype=bool) # the last n bins signal_energy_fluence = trace_utilities.get_electric_field_energy_fluence(trace, times, mask_signal_window, mask_noise_window) dt = times[1] - times[0] diff --git a/NuRadioReco/modules/trigger/highLowThreshold.py b/NuRadioReco/modules/trigger/highLowThreshold.py index c08e9d7c4..cff116833 100644 --- a/NuRadioReco/modules/trigger/highLowThreshold.py +++ b/NuRadioReco/modules/trigger/highLowThreshold.py @@ -32,7 +32,7 @@ def get_high_low_triggers(trace, high_threshold, low_threshold, the bins where the trigger condition is satisfied """ n_bins_coincidence = int(np.round(time_coincidence / dt)) + 1 - c = np.ones(n_bins_coincidence, dtype=np.bool) + c = np.ones(n_bins_coincidence, dtype=bool) logger.debug("length of trace {} bins, coincidence window {} bins".format(len(trace), len(c))) c2 = np.array([1, -1]) @@ -70,7 +70,7 @@ def get_majority_logic(tts, number_of_coincidences=2, time_coincidence=32 * unit if(n_bins_coincidence > n): # reduce coincidence window to maximum trace length n_bins_coincidence = n logger.debug("specified coincidence window longer than tracelenght, reducing coincidence window to trace length") - c = np.ones(n_bins_coincidence, dtype=np.bool) + c = np.ones(n_bins_coincidence, dtype=bool) for i in range(len(tts)): logger.debug("get_majority_logic() length of trace {} bins, coincidence window {} bins".format(len(tts[i]), len(c))) From ddcc3d245f8425078745720c8a79bd5b4fd05bac Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 17:47:08 +0000 Subject: [PATCH 089/102] update reference value adapting to new noise --- NuRadioMC/test/Veff/1e18eV/T03check_output_noise.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioMC/test/Veff/1e18eV/T03check_output_noise.py b/NuRadioMC/test/Veff/1e18eV/T03check_output_noise.py index a78fe4bfc..f1ebfc01f 100755 --- a/NuRadioMC/test/Veff/1e18eV/T03check_output_noise.py +++ b/NuRadioMC/test/Veff/1e18eV/T03check_output_noise.py @@ -11,7 +11,7 @@ # the event generation has a fixed seed and I switched to Alvarez2000 (also no randomness) # thus, the Veff has no statistical scatter -Veff_mean = 8.17491 +Veff_mean = 7.86364 Veff_sigma = 0.0001 path = os.path.dirname(os.path.abspath(__file__)) From 4e9e248a83cd329e42201aa8851c7dc7ef9fc767 Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 18:11:23 +0000 Subject: [PATCH 090/102] update reference values due to new noise realizations --- .../test/tiny_reconstruction/reference.json | 345 +----------------- 1 file changed, 1 insertion(+), 344 deletions(-) diff --git a/NuRadioReco/test/tiny_reconstruction/reference.json b/NuRadioReco/test/tiny_reconstruction/reference.json index 40c6b9def..968d3584b 100644 --- a/NuRadioReco/test/tiny_reconstruction/reference.json +++ b/NuRadioReco/test/tiny_reconstruction/reference.json @@ -1,344 +1 @@ -{ - "0" : { - "station_parameters" : { - "nu_zenith" : null, - "nu_azimuth" : null, - "nu_energy" : null, - "nu_flavor" : null, - "ccnc" : null, - "nu_vertex" : null, - "inelasticity" : null, - "triggered" : null, - "cr_energy" : null, - "cr_zenith" : null, - "cr_azimuth" : null, - "channels_max_amplitude" : 0.08853022471996891, - "zenith" : 0.79, - "azimuth" : 3.96, - "zenith_cr_templatefit" : null, - "zenith_nu_templatefit" : null, - "cr_xcorrelations" : null, - "nu_xcorrelations" : null, - "station_time" : null, - "cr_energy_em" : null, - "nu_inttype" :
null, - "chi2_efield_time_direction_fit" : null, - "ndf_efield_time_direction_fit" : null, - "cr_xmax" : null, - "vertex_2D_fit" : null, - "distance_correlations" : null - }, - "sim_station_parameters" : { - "nu_zenith" : null, - "nu_azimuth" : null, - "nu_energy" : null, - "nu_flavor" : null, - "ccnc" : null, - "nu_vertex" : null, - "inelasticity" : null, - "triggered" : null, - "cr_energy" : 1.58489319246E18, - "cr_zenith" : null, - "cr_azimuth" : null, - "channels_max_amplitude" : null, - "zenith" : 0.7853981633974483, - "azimuth" : 3.957853280977215, - "zenith_cr_templatefit" : null, - "zenith_nu_templatefit" : null, - "cr_xcorrelations" : null, - "nu_xcorrelations" : null, - "station_time" : null, - "cr_energy_em" : 1.39187479744259994E18, - "nu_inttype" : null, - "chi2_efield_time_direction_fit" : null, - "ndf_efield_time_direction_fit" : null, - "cr_xmax" : 646.2024663, - "vertex_2D_fit" : null, - "distance_correlations" : null - }, - "channel_parameters" : { - "zenith" : [ null, null, null, null ], - "azimuth" : [ null, null, null, null ], - "maximum_amplitude" : [ 0.0873141653492205, 0.08853022471996891, 0.07885012049626394, 0.07486104950522432 ], - "SNR" : [ { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.004149416647868447, - "peak_amplitude" : 0.004225325303006149, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.0061325770929495016, - "peak_amplitude" : 0.006227798651940198, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.0038091911965324092, - "peak_amplitude" : 0.0038590946944723325, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.003328271897442733, - "peak_amplitude" : 0.0034583171387571744, - "Seckel_2_noise" : 5 - } ], - "maximum_amplitude_envelope" : [ 0.08772020520899446, 0.08892789858406401, 0.07966830072878535, 0.08297084302728261 ], - "P2P_amplitude" : [ 0.1724152251327864, 0.16341917302469117, 0.146681542086196, 0.1458075155635604 ], - "cr_xcorrelations" : [ null, null, null, null ], - "nu_xcorrelations" : [ null, null, null, null ], - "signal_time" : [ -168.2559785714568, -171.65597857145679, -183.2559785714568, -173.85597857145672 ], - "noise_rms" : [ 0.008836023847618458, 0.008955465322914774, 0.008881819339363746, 0.009304834436981291 ], - "signal_regions" : [ null, null, null, null ], - "noise_regions" : [ null, null, null, null ], - "signal_time_offset" : [ null, null, null, null ], - "signal_receiving_zenith" : [ null, null, null, null ], - "signal_ray_type" : [ null, null, null, null ], - "signal_receiving_azimuth" : [ null, null, null, null ] - }, - "electric_field_parameters" : { - "ray_path_type" : [ null, null ], - "polarization_angle" : [ 1.437014716504628, 1.505812376998251 ], - "polarization_angle_expectation" : [ 1.3259385237455839, -1.8156541298442093 ], - "signal_energy_fluence" : [ [ 0.0, 0.2505871320257097, 13.83446329404234 ], [ 0.0, 0.07493732342984019, 17.695470024342303 ] ], - "cr_spectrum_slope" : [ null, -5.071257963761185 ], - "zenith" : [ 0.79, 0.79 ], - "azimuth" : [ 3.96, 3.96 ], - "signal_time" : [ -254.95450631365475, null ], - "nu_vertex_distance" : [ null, null ], - "nu_viewing_angle" : [ null, null ], - "max_amp_antenna" : [ null, null ], - "max_amp_antenna_envelope" : [ null, null ], - "reflection_coefficient_theta" : [ null, null ], - "reflection_coefficient_phi" : [ null, null ], - "cr_spectrum_quadratic_term" : [ null, 3.7278692487503235 ], - "energy_fluence_ratios" : [ null, null ] - } - }, - "1" 
: { - "station_parameters" : { - "nu_zenith" : null, - "nu_azimuth" : null, - "nu_energy" : null, - "nu_flavor" : null, - "ccnc" : null, - "nu_vertex" : null, - "inelasticity" : null, - "triggered" : null, - "cr_energy" : null, - "cr_zenith" : null, - "cr_azimuth" : null, - "channels_max_amplitude" : 0.33359702926988355, - "zenith" : 0.78, - "azimuth" : 3.95, - "zenith_cr_templatefit" : null, - "zenith_nu_templatefit" : null, - "cr_xcorrelations" : null, - "nu_xcorrelations" : null, - "station_time" : null, - "cr_energy_em" : null, - "nu_inttype" : null, - "chi2_efield_time_direction_fit" : null, - "ndf_efield_time_direction_fit" : null, - "cr_xmax" : null, - "vertex_2D_fit" : null, - "distance_correlations" : null - }, - "sim_station_parameters" : { - "nu_zenith" : null, - "nu_azimuth" : null, - "nu_energy" : null, - "nu_flavor" : null, - "ccnc" : null, - "nu_vertex" : null, - "inelasticity" : null, - "triggered" : null, - "cr_energy" : 1.58489319246E18, - "cr_zenith" : null, - "cr_azimuth" : null, - "channels_max_amplitude" : null, - "zenith" : 0.7853981633974483, - "azimuth" : 3.957853280977215, - "zenith_cr_templatefit" : null, - "zenith_nu_templatefit" : null, - "cr_xcorrelations" : null, - "nu_xcorrelations" : null, - "station_time" : null, - "cr_energy_em" : 1.39187479744259994E18, - "nu_inttype" : null, - "chi2_efield_time_direction_fit" : null, - "ndf_efield_time_direction_fit" : null, - "cr_xmax" : 646.2024663, - "vertex_2D_fit" : null, - "distance_correlations" : null - }, - "channel_parameters" : { - "zenith" : [ null, null, null, null ], - "azimuth" : [ null, null, null, null ], - "maximum_amplitude" : [ 0.2835856575324592, 0.33359702926988355, 0.2791941553632823, 0.3256776186469762 ], - "SNR" : [ { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.00291206901456176, - "peak_amplitude" : 0.002929725052658198, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.00340572893988943, - "peak_amplitude" : 0.0034558407562806964, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.006845057653623751, - "peak_amplitude" : 0.00701923788399404, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.007164164413376706, - "peak_amplitude" : 0.007202262656811785, - "Seckel_2_noise" : 5 - } ], - "maximum_amplitude_envelope" : [ 0.3003272394395557, 0.3399598814912192, 0.29236891303881996, 0.35467083189765575 ], - "P2P_amplitude" : [ 0.5632116726571561, 0.6637288453900938, 0.554719957942302, 0.6479974651404222 ], - "cr_xcorrelations" : [ null, null, null, null ], - "nu_xcorrelations" : [ null, null, null, null ], - "signal_time" : [ 587.7440214285432, 589.9440214285432, 574.9440214285432, 571.3440214285433 ], - "noise_rms" : [ 0.009006474050937649, 0.008732105958313228, 0.007788519158762668, 0.009397032099363559 ], - "signal_regions" : [ null, null, null, null ], - "noise_regions" : [ null, null, null, null ], - "signal_time_offset" : [ null, null, null, null ], - "signal_receiving_zenith" : [ null, null, null, null ], - "signal_ray_type" : [ null, null, null, null ], - "signal_receiving_azimuth" : [ null, null, null, null ] - }, - "electric_field_parameters" : { - "ray_path_type" : [ null, null ], - "polarization_angle" : [ 1.423905497514318, 1.4409019604509443 ], - "polarization_angle_expectation" : [ 1.3232105351636871, -1.818382118426106 ], - "signal_energy_fluence" : [ [ 0.0, 6.607535268712784, 301.8361992200022 ], [ 0.0, 5.437314688159896, 318.63935252848177 ] ], - 
"cr_spectrum_slope" : [ null, -6.6760948093020485 ], - "zenith" : [ 0.78, 0.78 ], - "azimuth" : [ 3.95, 3.95 ], - "signal_time" : [ 499.91208204952255, null ], - "nu_vertex_distance" : [ null, null ], - "nu_viewing_angle" : [ null, null ], - "max_amp_antenna" : [ null, null ], - "max_amp_antenna_envelope" : [ null, null ], - "reflection_coefficient_theta" : [ null, null ], - "reflection_coefficient_phi" : [ null, null ], - "cr_spectrum_quadratic_term" : [ null, -2.215049003838984 ], - "energy_fluence_ratios" : [ null, null ] - } - }, - "2" : { - "station_parameters" : { - "nu_zenith" : null, - "nu_azimuth" : null, - "nu_energy" : null, - "nu_flavor" : null, - "ccnc" : null, - "nu_vertex" : null, - "inelasticity" : null, - "triggered" : null, - "cr_energy" : null, - "cr_zenith" : null, - "cr_azimuth" : null, - "channels_max_amplitude" : 0.28471716111983614, - "zenith" : 0.78, - "azimuth" : 3.95, - "zenith_cr_templatefit" : null, - "zenith_nu_templatefit" : null, - "cr_xcorrelations" : null, - "nu_xcorrelations" : null, - "station_time" : null, - "cr_energy_em" : null, - "nu_inttype" : null, - "chi2_efield_time_direction_fit" : null, - "ndf_efield_time_direction_fit" : null, - "cr_xmax" : null, - "vertex_2D_fit" : null, - "distance_correlations" : null - }, - "sim_station_parameters" : { - "nu_zenith" : null, - "nu_azimuth" : null, - "nu_energy" : null, - "nu_flavor" : null, - "ccnc" : null, - "nu_vertex" : null, - "inelasticity" : null, - "triggered" : null, - "cr_energy" : 1.58489319246E18, - "cr_zenith" : null, - "cr_azimuth" : null, - "channels_max_amplitude" : null, - "zenith" : 0.7853981633974483, - "azimuth" : 3.957853280977215, - "zenith_cr_templatefit" : null, - "zenith_nu_templatefit" : null, - "cr_xcorrelations" : null, - "nu_xcorrelations" : null, - "station_time" : null, - "cr_energy_em" : 1.39187479744259994E18, - "nu_inttype" : null, - "chi2_efield_time_direction_fit" : null, - "ndf_efield_time_direction_fit" : null, - "cr_xmax" : 646.2024663, - "vertex_2D_fit" : null, - "distance_correlations" : null - }, - "channel_parameters" : { - "zenith" : [ null, null, null, null ], - "azimuth" : [ null, null, null, null ], - "maximum_amplitude" : [ 0.2617580175047184, 0.27712092224880147, 0.2555336277305896, 0.28471716111983614 ], - "SNR" : [ { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.0043472269995871535, - "peak_amplitude" : 0.0045710624556043215, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.003710424290754701, - "peak_amplitude" : 0.0037252751132054716, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.00777307702513199, - "peak_amplitude" : 0.007907951982053118, - "Seckel_2_noise" : 5 - }, { - "integrated_power" : 0.0, - "peak_2_peak_amplitude" : 0.008133561831805624, - "peak_amplitude" : 0.008336373693178935, - "Seckel_2_noise" : 5 - } ], - "maximum_amplitude_envelope" : [ 0.2653837960611956, 0.2969067177063586, 0.270376775534144, 0.298365691136913 ], - "P2P_amplitude" : [ 0.5188798774102936, 0.550291044285413, 0.5046288232897831, 0.5481116482309814 ], - "cr_xcorrelations" : [ null, null, null, null ], - "nu_xcorrelations" : [ null, null, null, null ], - "signal_time" : [ -134.05597857145676, -132.85597857145672, -146.85597857145672, -144.85597857145672 ], - "noise_rms" : [ 0.009190485861932177, 0.009505810656701827, 0.00886260343524874, 0.0091532389582909 ], - "signal_regions" : [ null, null, null, null ], - "noise_regions" : [ null, null, null, null ], - "signal_time_offset" : [ 
null, null, null, null ], - "signal_receiving_zenith" : [ null, null, null, null ], - "signal_ray_type" : [ null, null, null, null ], - "signal_receiving_azimuth" : [ null, null, null, null ] - }, - "electric_field_parameters" : { - "ray_path_type" : [ null, null ], - "polarization_angle" : [ 1.4819734258513997, 1.5006361258682528 ], - "polarization_angle_expectation" : [ 1.3232105351636871, -1.818382118426106 ], - "signal_energy_fluence" : [ [ 0.0, 2.5899529944532245, 326.5528775660024 ], [ 0.0, 2.0442071775847332, 413.9200130611196 ] ], - "cr_spectrum_slope" : [ null, -1.7873921532547352 ], - "zenith" : [ 0.78, 0.78 ], - "azimuth" : [ 3.95, 3.95 ], - "signal_time" : [ -193.88791795047757, null ], - "nu_vertex_distance" : [ null, null ], - "nu_viewing_angle" : [ null, null ], - "max_amp_antenna" : [ null, null ], - "max_amp_antenna_envelope" : [ null, null ], - "reflection_coefficient_theta" : [ null, null ], - "reflection_coefficient_phi" : [ null, null ], - "cr_spectrum_quadratic_term" : [ null, 0.4769503271885924 ], - "energy_fluence_ratios" : [ null, null ] - } - } -} \ No newline at end of file +{"0": {"station_parameters": {"nu_zenith": null, "nu_azimuth": null, "nu_energy": null, "nu_flavor": null, "ccnc": null, "nu_vertex": null, "inelasticity": null, "triggered": null, "cr_energy": null, "cr_zenith": null, "cr_azimuth": null, "channels_max_amplitude": 0.09219726935932945, "zenith": 0.76585400390625, "azimuth": 3.94125048828125, "zenith_cr_templatefit": null, "zenith_nu_templatefit": null, "cr_xcorrelations": null, "nu_xcorrelations": null, "station_time": null, "cr_energy_em": null, "nu_inttype": null, "chi2_efield_time_direction_fit": null, "ndf_efield_time_direction_fit": null, "cr_xmax": null, "vertex_2D_fit": null, "distance_correlations": null, "shower_energy": null, "viewing_angles": null}, "sim_station_parameters": {"nu_zenith": null, "nu_azimuth": null, "nu_energy": null, "nu_flavor": null, "ccnc": null, "nu_vertex": null, "inelasticity": null, "triggered": null, "cr_energy": 1.58489319246e+18, "cr_zenith": null, "cr_azimuth": null, "channels_max_amplitude": null, "zenith": 0.7853981633974483, "azimuth": 3.957853280977215, "zenith_cr_templatefit": null, "zenith_nu_templatefit": null, "cr_xcorrelations": null, "nu_xcorrelations": null, "station_time": null, "cr_energy_em": 1.3918747974426e+18, "nu_inttype": null, "chi2_efield_time_direction_fit": null, "ndf_efield_time_direction_fit": null, "cr_xmax": 646.2024663, "vertex_2D_fit": null, "distance_correlations": null, "shower_energy": null, "viewing_angles": null}, "channel_parameters": {"zenith": [null, null, null, null], "azimuth": [null, null, null, null], "maximum_amplitude": [0.0741114148547873, 0.07822416425569524, 0.08019365868532256, 0.09219726935932945], "SNR": [{"integrated_power": 0.0, "peak_2_peak_amplitude": 0.004780092071917627, "peak_amplitude": 0.00485887816779479, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.0029781482057526206, "peak_amplitude": 0.0030899574970230446, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.002963497087732319, "peak_amplitude": 0.0029912274254951183, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.003315202825440897, "peak_amplitude": 0.0035027540130119802, "Seckel_2_noise": 5}], "maximum_amplitude_envelope": [0.0789854151249416, 0.0850534211758587, 0.08035990802734218, 0.09425391321371974], "P2P_amplitude": [0.13819712244096005, 0.15451260952068696, 0.15292399225172826, 0.17650992826729145], 
"cr_xcorrelations": [null, null, null, null], "nu_xcorrelations": [null, null, null, null], "signal_time": [-165.45597857145674, -169.65597857145679, -182.85597857145672, -181.05597857145676], "noise_rms": [0.009497729224153949, 0.008938569434903116, 0.008083395580028708, 0.009418626437748765], "signal_regions": [null, null, null, null], "noise_regions": [null, null, null, null], "signal_time_offset": [null, null, null, null], "signal_receiving_zenith": [null, null, null, null], "signal_ray_type": [null, null, null, null], "signal_receiving_azimuth": [null, null, null, null]}, "electric_field_parameters": {"ray_path_type": [null, null], "polarization_angle": [1.4654396095601694, 1.4895721505795514], "polarization_angle_expectation": [1.3193420263512765, -1.8222506272385168], "signal_energy_fluence": [[0.0, 0.1499228726724943, 13.406681586865194], [0.0, 0.1138089722425281, 17.17484362157034]], "cr_spectrum_slope": [null, -5.857288373436567], "zenith": [0.76585400390625, 0.76585400390625], "azimuth": [3.94125048828125, 3.94125048828125], "signal_time": [-256.0418188450112, null], "nu_vertex_distance": [null, null], "nu_viewing_angle": [null, null], "max_amp_antenna": [null, null], "max_amp_antenna_envelope": [null, null], "reflection_coefficient_theta": [null, null], "reflection_coefficient_phi": [null, null], "cr_spectrum_quadratic_term": [null, 5.585796124606195], "energy_fluence_ratios": [null, null]}}, "1": {"station_parameters": {"nu_zenith": null, "nu_azimuth": null, "nu_energy": null, "nu_flavor": null, "ccnc": null, "nu_vertex": null, "inelasticity": null, "triggered": null, "cr_energy": null, "cr_zenith": null, "cr_azimuth": null, "channels_max_amplitude": 0.34265971445728505, "zenith": 0.79, "azimuth": 3.96, "zenith_cr_templatefit": null, "zenith_nu_templatefit": null, "cr_xcorrelations": null, "nu_xcorrelations": null, "station_time": null, "cr_energy_em": null, "nu_inttype": null, "chi2_efield_time_direction_fit": null, "ndf_efield_time_direction_fit": null, "cr_xmax": null, "vertex_2D_fit": null, "distance_correlations": null, "shower_energy": null, "viewing_angles": null}, "sim_station_parameters": {"nu_zenith": null, "nu_azimuth": null, "nu_energy": null, "nu_flavor": null, "ccnc": null, "nu_vertex": null, "inelasticity": null, "triggered": null, "cr_energy": 1.58489319246e+18, "cr_zenith": null, "cr_azimuth": null, "channels_max_amplitude": null, "zenith": 0.7853981633974483, "azimuth": 3.957853280977215, "zenith_cr_templatefit": null, "zenith_nu_templatefit": null, "cr_xcorrelations": null, "nu_xcorrelations": null, "station_time": null, "cr_energy_em": 1.3918747974426e+18, "nu_inttype": null, "chi2_efield_time_direction_fit": null, "ndf_efield_time_direction_fit": null, "cr_xmax": 646.2024663, "vertex_2D_fit": null, "distance_correlations": null, "shower_energy": null, "viewing_angles": null}, "channel_parameters": {"zenith": [null, null, null, null], "azimuth": [null, null, null, null], "maximum_amplitude": [0.29720895105426465, 0.34265971445728505, 0.28631950709258547, 0.3355886336552229], "SNR": [{"integrated_power": 0.0, "peak_2_peak_amplitude": 0.006919176246584566, "peak_amplitude": 0.007147260712361315, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.0047795421977155994, "peak_amplitude": 0.00483475221965112, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.007958346991753162, "peak_amplitude": 0.00808818541204719, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.006171263679930979, 
"peak_amplitude": 0.00634102083568216, "Seckel_2_noise": 5}], "maximum_amplitude_envelope": [0.30136927893948445, 0.3430368419418388, 0.2883085667945778, 0.3430413736731565], "P2P_amplitude": [0.5781561921741288, 0.6591005848543372, 0.5651166372487595, 0.6691483403791307], "cr_xcorrelations": [null, null, null, null], "nu_xcorrelations": [null, null, null, null], "signal_time": [583.9440214285432, 585.5440214285434, 570.5440214285434, 575.1440214285433], "noise_rms": [0.009359269057858015, 0.009003414652873262, 0.008341757854147532, 0.009479378300635246], "signal_regions": [null, null, null, null], "noise_regions": [null, null, null, null], "signal_time_offset": [null, null, null, null], "signal_receiving_zenith": [null, null, null, null], "signal_ray_type": [null, null, null, null], "signal_receiving_azimuth": [null, null, null, null]}, "electric_field_parameters": {"ray_path_type": [null, null], "polarization_angle": [1.4164838522395478, 1.4309598632203113], "polarization_angle_expectation": [1.3259385237455839, -1.8156541298442093], "signal_energy_fluence": [[0.0, 7.388558320124647, 305.36881995591784], [0.0, 6.420446226373949, 324.0685176647595]], "cr_spectrum_slope": [null, -6.704957977720079], "zenith": [0.79, 0.79], "azimuth": [3.96, 3.96], "signal_time": [500.04549368634525, null], "nu_vertex_distance": [null, null], "nu_viewing_angle": [null, null], "max_amp_antenna": [null, null], "max_amp_antenna_envelope": [null, null], "reflection_coefficient_theta": [null, null], "reflection_coefficient_phi": [null, null], "cr_spectrum_quadratic_term": [null, -2.2958549567008015], "energy_fluence_ratios": [null, null]}}, "2": {"station_parameters": {"nu_zenith": null, "nu_azimuth": null, "nu_energy": null, "nu_flavor": null, "ccnc": null, "nu_vertex": null, "inelasticity": null, "triggered": null, "cr_energy": null, "cr_zenith": null, "cr_azimuth": null, "channels_max_amplitude": 0.29293556379400426, "zenith": 0.78, "azimuth": 3.95, "zenith_cr_templatefit": null, "zenith_nu_templatefit": null, "cr_xcorrelations": null, "nu_xcorrelations": null, "station_time": null, "cr_energy_em": null, "nu_inttype": null, "chi2_efield_time_direction_fit": null, "ndf_efield_time_direction_fit": null, "cr_xmax": null, "vertex_2D_fit": null, "distance_correlations": null, "shower_energy": null, "viewing_angles": null}, "sim_station_parameters": {"nu_zenith": null, "nu_azimuth": null, "nu_energy": null, "nu_flavor": null, "ccnc": null, "nu_vertex": null, "inelasticity": null, "triggered": null, "cr_energy": 1.58489319246e+18, "cr_zenith": null, "cr_azimuth": null, "channels_max_amplitude": null, "zenith": 0.7853981633974483, "azimuth": 3.957853280977215, "zenith_cr_templatefit": null, "zenith_nu_templatefit": null, "cr_xcorrelations": null, "nu_xcorrelations": null, "station_time": null, "cr_energy_em": 1.3918747974426e+18, "nu_inttype": null, "chi2_efield_time_direction_fit": null, "ndf_efield_time_direction_fit": null, "cr_xmax": 646.2024663, "vertex_2D_fit": null, "distance_correlations": null, "shower_energy": null, "viewing_angles": null}, "channel_parameters": {"zenith": [null, null, null, null], "azimuth": [null, null, null, null], "maximum_amplitude": [0.2565625777544711, 0.2778654985637024, 0.2672414043783895, 0.29293556379400426], "SNR": [{"integrated_power": 0.0, "peak_2_peak_amplitude": 0.007948759818769018, "peak_amplitude": 0.008209754518559166, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.002601223824751355, "peak_amplitude": 0.0026817225129317146, "Seckel_2_noise": 
5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.008923287811149969, "peak_amplitude": 0.009007262903406938, "Seckel_2_noise": 5}, {"integrated_power": 0.0, "peak_2_peak_amplitude": 0.0068025654072232655, "peak_amplitude": 0.007054720375806741, "Seckel_2_noise": 5}], "maximum_amplitude_envelope": [0.2618281394747253, 0.28224029243565324, 0.26779467480180347, 0.2932426286212969], "P2P_amplitude": [0.5044222411860875, 0.547608219972491, 0.5318703949237792, 0.5851999240317709], "cr_xcorrelations": [null, null, null, null], "nu_xcorrelations": [null, null, null, null], "signal_time": [-102.65597857145667, -133.45597857145674, -148.2559785714567, -147.45597857145674], "noise_rms": [0.008359098917345579, 0.00848077598024109, 0.008989153845587915, 0.008201090181413125], "signal_regions": [null, null, null, null], "noise_regions": [null, null, null, null], "signal_time_offset": [null, null, null, null], "signal_receiving_zenith": [null, null, null, null], "signal_ray_type": [null, null, null, null], "signal_receiving_azimuth": [null, null, null, null]}, "electric_field_parameters": {"ray_path_type": [null, null], "polarization_angle": [1.4786245054415663, 1.5037562254029073], "polarization_angle_expectation": [1.3232105351636871, -1.818382118426106], "signal_energy_fluence": [[0.0, 2.8633662095356187, 335.1320229092165], [0.0, 1.9443851451542769, 431.3306782776532]], "cr_spectrum_slope": [null, -1.715922037957672], "zenith": [0.78, 0.78], "azimuth": [3.95, 3.95], "signal_time": [-193.88791795047757, null], "nu_vertex_distance": [null, null], "nu_viewing_angle": [null, null], "max_amp_antenna": [null, null], "max_amp_antenna_envelope": [null, null], "reflection_coefficient_theta": [null, null], "reflection_coefficient_phi": [null, null], "cr_spectrum_quadratic_term": [null, 0.5209156977104409], "energy_fluence_ratios": [null, null]}}} \ No newline at end of file From 417aec21e4a84391536815f7ffb9614605cb32ff Mon Sep 17 00:00:00 2001 From: Christian Glaser Date: Tue, 16 May 2023 18:19:17 +0000 Subject: [PATCH 091/102] update radiotools version with the same numpy fixes --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7fb653fb4..e5e0b4292 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ tinydb = ">=4.1.1" tinydb-serialization = ">=2.1" aenum = "*" astropy = "*" -radiotools = ">=0.2" +radiotools = ">=0.2.1" cython = "*" dash = ">=2.0" future = "*" From 7f0d678681c0f1d0453df86b1836fe9d5029c5d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= <30903175+fschlueter@users.noreply.github.com> Date: Fri, 19 May 2023 12:06:04 +0200 Subject: [PATCH 092/102] Update readRNOGDataMattak.py Fix error logging: use updated git repo --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index a61dfa8fc..f482735a2 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -155,7 +155,7 @@ def __init__(self, run_table_path=None): "Runs can not be filtered.") except ImportError: self.logger.warn("Import of run table failed. Runs can not be filtered.! 
\n" - "You can get the interface from GitHub: git@github.com:RNO-G/rnog-data-analysis-and-issues.git") + "You can get the interface from GitHub: git@github.com:RNO-G/rnog-runtable.git") else: import pandas self.__run_table = pandas.read_csv(run_table_path) From f84034b1f88a1880277149fe87e2b2322eeae174 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= <30903175+fschlueter@users.noreply.github.com> Date: Fri, 19 May 2023 13:40:39 +0200 Subject: [PATCH 093/102] Update readRNOGDataMattak.py Move logger to init --- .../modules/io/RNO_G/readRNOGDataMattak.py | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index f482735a2..65c85addb 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -132,14 +132,20 @@ def all_files_in_directory(mattak_dir): class readRNOGData: - def __init__(self, run_table_path=None): + def __init__(self, run_table_path=None, log_level=logging.INFO): """ Parameters ---------- run_table_path: str Path to a run_table.cvs file. If None, the run table is queried from the DB. (Default: None) + + log_level: enum + Set verbosity level of logger. If logging.DEBUG, set mattak to verbose (unless specified in mattak_kwargs). + (Default: logging.INFO) """ + self.logger = logging.getLogger('NuRadioReco.readRNOGData') + self.logger.setLevel(log_level) # Initialize run table for run selection self.__run_table = None @@ -173,8 +179,7 @@ def begin(self, run_time_range=None, max_trigger_rate=0 * units.Hz, mattak_kwargs={}, - overwrite_sampling_rate=None, - log_level=logging.INFO): + overwrite_sampling_rate=None): """ Parameters ---------- @@ -232,17 +237,10 @@ def begin(self, Set sampling rate of the imported waveforms. This overwrites what is read out from runinfo (i.e., stored in the mattak files). If None, nothing is overwritten and the sampling rate from the mattak file is used. (Default: None) NOTE: This option might be necessary when old mattak files are read which have this not set. - - log_level: enum - Set verbosity level of logger. If logging.DEBUG, set mattak to verbose (unless specified in mattak_kwargs). 
- (Default: logging.INFO) """ t0 = time.time() - self.logger = logging.getLogger('NuRadioReco.readRNOGData') - self.logger.setLevel(log_level) - self._read_calibrated_data = read_calibrated_data self._apply_baseline_correction = apply_baseline_correction self._convert_to_voltage = convert_to_voltage From f6da08792988c03d906c58ef55394875c732d042 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 19 May 2023 13:50:05 +0200 Subject: [PATCH 094/102] Fix reader and example --- NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py | 1 - NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py index 950e8c3cb..c215601bb 100644 --- a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py +++ b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py @@ -36,7 +36,6 @@ class EventInfo: rnog_reader.begin( list_of_root_files, selectors=selectors, - log_level=logging.INFO, # Currently false because Mattak does not contain calibrated data yet read_calibrated_data=False, # Only used when read_calibrated_data==False, performs a simple baseline subtraction each 128 bins diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 65c85addb..27e432f56 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -294,7 +294,7 @@ def begin(self, if "verbose" in mattak_kwargs: verbose = mattak_kwargs.pop("verbose") else: - verbose = log_level == logging.DEBUG + verbose = self.logger.level == logging.DEBUG for dir_file in dirs_files: From f767843e064e42667dd637130e4ce534101a1a37 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Fri, 19 May 2023 14:24:31 +0200 Subject: [PATCH 095/102] Catch exceptions when reading runs with mattak --- .../RNO_data/read_data_example/read_rnog.py | 6 ++-- .../modules/io/RNO_G/readRNOGDataMattak.py | 28 ++++++++++++------- 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py index c215601bb..2855cbe7e 100644 --- a/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py +++ b/NuRadioReco/examples/RNO_data/read_data_example/read_rnog.py @@ -9,7 +9,7 @@ list_of_root_files = sys.argv[1:-1] output_filename = sys.argv[-1] -rnog_reader = readRNOGDataMattak.readRNOGData() +rnog_reader = readRNOGDataMattak.readRNOGData(log_level=logging.DEBUG) writer = eventWriter.eventWriter() """ @@ -53,11 +53,9 @@ class EventInfo: writer.begin(filename=output_filename) -for i_event, event in enumerate(rnog_reader.run()): - +for i_event, event in enumerate(rnog_reader.run()): writer.run(event) -print(i_event) rnog_reader.end() writer.end() diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 27e432f56..502c5185b 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -306,9 +306,13 @@ def begin(self, if not all_files_in_directory(dir_file): self.logger.error(f"Incomplete directory: {dir_file}. 
Skip ...") - continue - - dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=dir_file, verbose=verbose, **mattak_kwargs) + continue + + try: + dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=dir_file, verbose=verbose, **mattak_kwargs) + except Exception as e: + self.logger.error(f"The following Exeption was raised reading in the run: f{dir_file}. Skip that run ...: {e}") + continue else: raise NotImplementedError("The option to read in files is not implemented yet") @@ -474,7 +478,7 @@ def _filter_event(self, evtinfo, event_idx=None): for selector in self._selectors: if not selector(evtinfo): self.logger.debug(f"Event {event_idx} (station {evtinfo.station}, run {evtinfo.run}, " - f"event number {evtinfo.eventNumber}) is skipped.") + f"event number {evtinfo.eventNumber}) is skipped.") self.__skipped += 1 return True @@ -772,9 +776,13 @@ def get_event(self, run_nr, event_id): def end(self): - self.logger.info( - f"\n\tRead {self.__counter} events (skipped {self.__skipped} events, {self.__invalid} invalid events)" - f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" - f"\n\tTime to read all events : {self._time_run:.2f}s" - f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" - f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") + if self.__counter: + self.logger.info( + f"\n\tRead {self.__counter} events (skipped {self.__skipped} events, {self.__invalid} invalid events)" + f"\n\tTime to initialize data sets : {self._time_begin:.2f}s" + f"\n\tTime to read all events : {self._time_run:.2f}s" + f"\n\tTime to per event : {self._time_run / self.__counter:.2f}s" + f"\n\tRead {self.__n_runs} runs, skipped {self.__skipped_runs} runs.") + else: + self.logger.info( + f"\n\tRead {self.__counter} events (skipped {self.__skipped} events, {self.__invalid} invalid events)") \ No newline at end of file From 75cc622964f3254b6918c888207ce5af02d7a557 Mon Sep 17 00:00:00 2001 From: Alan Coleman Date: Mon, 22 May 2023 19:34:18 -0400 Subject: [PATCH 096/102] Update naming scheme to be consistent with analogToDigitalConverter --- NuRadioReco/utilities/noise.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/NuRadioReco/utilities/noise.py b/NuRadioReco/utilities/noise.py index 31bc02b4c..0fd45282b 100644 --- a/NuRadioReco/utilities/noise.py +++ b/NuRadioReco/utilities/noise.py @@ -274,8 +274,8 @@ def __init__(self, detector_filename, station_id, triggered_channels, self.pre_trigger_bins = int(pre_trigger_time * self.sampling_rate) self.n_samples_trigger = int(trace_length * self.sampling_rate) det_channel = self.det.get_channel(station_id, triggered_channels[0]) - self.adc_n_bits = det_channel["adc_nbits"] - self.adc_noise_n_bits = det_channel["adc_noise_nbits"] + self.adc_n_bits = det_channel["trigger_adc_nbits"] + self.adc_noise_n_bits = det_channel["trigger_adc_noise_nbits"] self.n_channels = len(triggered_channels) self.triggered_channels = triggered_channels @@ -317,7 +317,7 @@ def __init__(self, detector_filename, station_id, triggered_channels, self.filt = channelBandPassFilter.get_filter(self.ff, station_id, channel_id, self.det, passband=[96 * units.MHz, 100 * units.GHz], filter_type='cheby1', order=4, rp=0.1) self.filt *= channelBandPassFilter.get_filter(self.ff, station_id, channel_id, self.det, - passband=[0 * units.MHz, 220 * units.MHz], filter_type='cheby1', order=7, rp=0.1) + passband=[1 * units.MHz, 220 * units.MHz], filter_type='cheby1', order=7, rp=0.1) self.norm = np.trapz(np.abs(self.filt) ** 2, self.ff) 
self.amplitude = (self.max_freq - self.min_freq) ** 0.5 / self.norm ** 0.5 * self.Vrms print(f"Vrms = {self.Vrms:.3g}V, noise amplitude = {self.amplitude:.3g}V, bandwidth = {self.norm/units.MHz:.0f}MHz") From 6654b6674bb55c296348cfff6f3709a385a739f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 23 May 2023 10:56:55 +0200 Subject: [PATCH 097/102] Add specific Exceptions, add traceback information --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 502c5185b..ec741b626 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -14,6 +14,7 @@ from NuRadioReco.utilities import units import mattak.Dataset +import uproot # only needed to catch an exception def baseline_correction(wfs, n_bins=128, func=np.median): @@ -297,7 +298,7 @@ def begin(self, verbose = self.logger.level == logging.DEBUG for dir_file in dirs_files: - + if not os.path.exists(dir_file): self.logger.error(f"The directory/file {dir_file} does not exist") continue @@ -310,8 +311,11 @@ def begin(self, try: dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=dir_file, verbose=verbose, **mattak_kwargs) - except Exception as e: - self.logger.error(f"The following Exeption was raised reading in the run: f{dir_file}. Skip that run ...: {e}") + except (ReferenceError, uproot.exceptions.KeyInFileError) as e: + self.logger.error(f"The following exeption was raised reading in the run: {dir_file}. Skip that run ...:\n") + import traceback + traceback.print_exc() + print("") continue else: raise NotImplementedError("The option to read in files is not implemented yet") From ae9a23e6adf7c205e2e54c68ecacb1eb48bc8238 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 23 May 2023 10:58:11 +0200 Subject: [PATCH 098/102] fix log level comparison --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index ec741b626..2217a319b 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -295,7 +295,7 @@ def begin(self, if "verbose" in mattak_kwargs: verbose = mattak_kwargs.pop("verbose") else: - verbose = self.logger.level == logging.DEBUG + verbose = self.logger.level >= logging.DEBUG for dir_file in dirs_files: From 97942ed7a4c02c86d46e52baa671d6ce5106b3a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 23 May 2023 11:01:12 +0200 Subject: [PATCH 099/102] Use more general exception --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 2217a319b..983d1f1d2 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -14,7 +14,6 @@ from NuRadioReco.utilities import units import mattak.Dataset -import uproot # only needed to catch an exception def baseline_correction(wfs, n_bins=128, func=np.median): @@ -311,7 +310,7 @@ def begin(self, try: dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=dir_file, verbose=verbose, **mattak_kwargs) - 
except (ReferenceError, uproot.exceptions.KeyInFileError) as e: + except (ReferenceError, KeyError) as e: self.logger.error(f"The following exeption was raised reading in the run: {dir_file}. Skip that run ...:\n") import traceback traceback.print_exc() From 309a2ba069aaa8e1b5f1c7931b580bd80800df5c Mon Sep 17 00:00:00 2001 From: Sjoerd Bouma Date: Tue, 23 May 2023 11:32:46 +0200 Subject: [PATCH 100/102] use logger for traceback --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index 983d1f1d2..1ef173bd4 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -311,10 +311,7 @@ def begin(self, try: dataset = mattak.Dataset.Dataset(station=0, run=0, data_dir=dir_file, verbose=verbose, **mattak_kwargs) except (ReferenceError, KeyError) as e: - self.logger.error(f"The following exeption was raised reading in the run: {dir_file}. Skip that run ...:\n") - import traceback - traceback.print_exc() - print("") + self.logger.error(f"The following exeption was raised reading in the run: {dir_file}. Skip that run ...:\n", exc_info=e) continue else: raise NotImplementedError("The option to read in files is not implemented yet") From 99ec961414d9323946550c943d81952d359a7f3c Mon Sep 17 00:00:00 2001 From: Felix Date: Thu, 25 May 2023 19:04:20 +0200 Subject: [PATCH 101/102] Skip empty strings as noise folders. Might occure if noise folder not specified and causes the program to search uncontrolled for root files --- NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py index d3f18689f..c0c781c8b 100644 --- a/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py +++ b/NuRadioReco/modules/measured_noise/RNO_G/noiseImporter.py @@ -73,10 +73,13 @@ def begin(self, noise_folders, file_pattern="*", if not isinstance(noise_folders, list): noise_folders = [noise_folders] - + # find all subfolders noise_files = [] for noise_folder in noise_folders: + if noise_folder == "": + continue + noise_files += glob.glob(f"{noise_folder}/**/{file_pattern}root", recursive=True) self.__noise_folders = np.unique([os.path.dirname(e) for e in noise_files]) From 0894ca73a1bdab9046206194affc95a33466e9bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20Schl=C3=BCter?= Date: Tue, 30 May 2023 12:25:44 +0200 Subject: [PATCH 102/102] Fix bug in get_events_information which appears when calling that function twice --- NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py index fb19c7ed2..db2835ecb 100644 --- a/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py +++ b/NuRadioReco/modules/io/RNO_G/readRNOGDataMattak.py @@ -515,7 +515,8 @@ def get_events_information(self, keys=["station", "run", "eventNumber"]): if not do_read: # ... or when it does not have the desired information - first_event_info = next(iter(self._events_information)) + first_event_info = self._events_information[list(self._events_information.keys())[0]] + for key in keys: if key not in list(first_event_info.keys()): do_read = True
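
Patches 095, 097, 099 and 100 converge on a single pattern for opening a run directory with mattak: try to build the dataset, and if mattak raises, log the full traceback through the module logger and skip that run instead of aborting the whole read. Below is a minimal sketch of that pattern; the open_run helper and the logger name are illustrative, not part of readRNOGDataMattak.

    import logging
    import mattak.Dataset

    logger = logging.getLogger("rnog_read_sketch")  # illustrative name, not the module's logger

    def open_run(data_dir, verbose=False, **mattak_kwargs):
        """Try to open one run directory; return None if mattak cannot read it."""
        try:
            return mattak.Dataset.Dataset(station=0, run=0, data_dir=data_dir,
                                          verbose=verbose, **mattak_kwargs)
        except (ReferenceError, KeyError) as e:
            # exc_info attaches the traceback to the log record, replacing the
            # earlier import traceback / traceback.print_exc() / print("") calls
            logger.error(f"Could not read run {data_dir}, skipping it.", exc_info=e)
            return None

Catching the broader KeyError instead of uproot.exceptions.KeyInFileError (patch 099) also removes the uproot import that, per its own comment, was only needed to name that exception.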
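
The filter change in patch 096 (low band edge moved from 0 to 1 MHz) feeds directly into the noise normalisation visible in the surrounding context lines: the unfiltered noise must be generated with an RMS scaled up by the square root of the ratio of the full generation bandwidth to the effective filter bandwidth, so that the trace ends up at the target Vrms after the band pass. The following stand-alone sketch uses a flat toy filter, plain SI units instead of the NuRadioReco units system, and a made-up Vrms purely to illustrate the relation.

    import numpy as np

    # toy stand-ins: flat unit-gain filter between 96 and 220 MHz
    ff = np.linspace(0.0, 1000e6, 10001)                # frequency axis / Hz
    filt = ((ff > 96e6) & (ff < 220e6)).astype(float)   # placeholder for the cheby1 filter chain
    Vrms = 10e-6                                         # target RMS after filtering (made up)
    max_freq, min_freq = ff[-1], 0.0                     # band in which flat noise is generated

    # effective filter bandwidth (np.trapezoid in NumPy >= 2.0); ~124 MHz for this toy filter
    norm = np.trapz(np.abs(filt) ** 2, ff)

    # RMS the unfiltered noise must be generated with so that the filtered
    # trace ends up with an RMS of Vrms
    amplitude = (max_freq - min_freq) ** 0.5 / norm ** 0.5 * Vrms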
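
The guard added in patch 101 matters because an empty string in the noise_folders list turns the glob pattern f"{noise_folder}/**/{file_pattern}root" into "/**/*root", which makes glob walk the entire filesystem. A self-contained sketch of the folder collection, with the hypothetical helper name collect_noise_folders, looks roughly like this:

    import glob
    import os
    import numpy as np

    def collect_noise_folders(noise_folders, file_pattern="*"):
        """Return the unique directories that contain matching root files."""
        if not isinstance(noise_folders, list):
            noise_folders = [noise_folders]

        noise_files = []
        for noise_folder in noise_folders:
            if noise_folder == "":
                # an unset option would otherwise expand to "/**/*root" and
                # trigger an uncontrolled recursive search from the filesystem root
                continue
            noise_files += glob.glob(f"{noise_folder}/**/{file_pattern}root", recursive=True)

        return np.unique([os.path.dirname(f) for f in noise_files])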
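
The bug fixed in patch 102 is the usual dict-iteration pitfall: next(iter(d)) returns the first key of the cache, not the cached dictionary, so the subsequent .keys() look-up fails once the cache is already filled, i.e. on the second call to get_events_information. A small illustration with made-up event values and one made-up extra key:

    # cache as it might look after a first call: event index -> info dict (values made up)
    events_information = {0: {"station": 11, "run": 100, "eventNumber": 1}}

    # buggy: iterating a dict yields its keys, so this returns the integer 0
    # and the .keys() call below would raise AttributeError
    # first_event_info = next(iter(events_information))

    # fixed: take the first cached entry by its key, as in patch 102
    first_event_info = events_information[list(events_information.keys())[0]]

    requested_keys = ["station", "run", "eventNumber", "triggerTime"]  # "triggerTime" is illustrative
    do_read = any(key not in first_event_info for key in requested_keys)  # re-read only if something is missing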