diff --git a/tools/icecube/.shed.yml b/tools/icecube/.shed.yml
new file mode 100644
index 00000000..9b8669bf
--- /dev/null
+++ b/tools/icecube/.shed.yml
@@ -0,0 +1,9 @@
+categories:
+- Astronomy
+description: Basic analysis of IceCube public data using the skyllh package
+homepage_url: null
+long_description: Basic analysis of IceCube public data using the skyllh package
+name: icecube_astro_tool
+owner: astroteam
+remote_repository_url: https://github.com/esg-epfl-apc/tools-astro/tree/main/tools
+type: unrestricted
diff --git a/tools/icecube/Image.py b/tools/icecube/Image.py
new file mode 100644
index 00000000..da9d4f31
--- /dev/null
+++ b/tools/icecube/Image.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import numpy as np
+from astropy import wcs
+from astropy.coordinates import SkyCoord
+from astropy.io import fits
+from matplotlib import pyplot as plt
+from numpy import cos, pi
+from oda_api.api import ProgressReporter
+from oda_api.data_products import ImageDataProduct, PictureProduct
+from oda_api.json import CustomJSONEncoder
+
+src_name = "NGC 1068"  # http://odahub.io/ontology#AstrophysicalObject
+RA = 40.669622  # http://odahub.io/ontology#PointOfInterestRA
+DEC = -0.013294  # http://odahub.io/ontology#PointOfInterestDEC
+# RA=308.65 # http://odahub.io/ontology#PointOfInterestRA
+# DEC=40.9 # http://odahub.io/ontology#PointOfInterestDEC
+# sigma=0.7 #http://odahub.io/ontology#AngleDegrees
+IC40 = False  # oda:Boolean
+IC59 = False  # oda:Boolean
+IC79 = False  # oda:Boolean
+IC86_I = True  # oda:Boolean
+IC86_II_VII = True  # oda:Boolean
+
+Radius = 1.0  # http://odahub.io/ontology#AngleDegrees
+pixel_size = 0.5  # http://odahub.io/ontology#AngleDegrees
+TSmap_type = "Fixed_slope"  # http://odahub.io/ontology#String ; oda:allowed_value "Fixed_slope","Free_slope"
+Slope = 3.0  # http://odahub.io/ontology#Float
+
+_galaxy_wd = os.getcwd()
+
+# inputs.json is written by the Galaxy wrapper at run time; user-supplied
+# values override the annotated defaults above, cast to the type of each default.
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic:
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+from skyllh.analyses.i3.publicdata_ps.time_integrated_ps import create_analysis
+from skyllh.core.config import Config
+from skyllh.core.random import RandomStateService
+from skyllh.core.source_model import PointLikeSource
+from skyllh.datasets.i3.PublicData_10y_ps import create_dataset_collection
+
+periods = []
+if IC40:
+    periods.append("IC40")
+if IC59:
+    periods.append("IC59")
+if IC79:
+    periods.append("IC79")
+if IC86_I:
+    periods.append("IC86_I")
+if IC86_II_VII:
+    periods.append("IC86_II-VII")
+
+cfg = Config()
+coords_s = SkyCoord(RA, DEC, unit="degree")
+cdec = cos(DEC * pi / 180.0)
+Npix = int(2 * (Radius / pixel_size)) + 1
+print(Npix)
+RA_grid = np.linspace(RA - Radius / cdec, RA + Radius / cdec, Npix)
+DEC_grid = np.linspace(DEC - Radius, DEC + Radius, Npix)
+TS_map = np.zeros((Npix, Npix))
+ns_map = np.zeros((Npix, Npix))
+gamma_map = np.zeros((Npix, Npix))
+
+if not os.path.exists("20210126_PS-IC40-IC86_VII.zip"):
+    get_ipython().system(  # noqa: F821
+        "wget http://icecube.wisc.edu/data-releases/20210126_PS-IC40-IC86_VII.zip"
+    )
+    get_ipython().system("unzip 20210126_PS-IC40-IC86_VII.zip")  # noqa: F821
+
+data_dir = os.getcwd() + "/icecube_10year_ps/"
+
+dsc = create_dataset_collection(cfg=cfg, base_path=data_dir)
+
+datasets = dsc[periods]
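+
+# Note on the scan geometry: each pixel of the TS map is treated as a trial
+# point source. The RA grid spacing is stretched by 1/cos(DEC) so that the
+# pixels subtend roughly equal angular sizes on the sky; the same factor
+# reappears below in the plot extent and in the WCS CDELT1.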
+
+rss = RandomStateService(seed=1)
+
+def process_pixel(index):
+    """Full fit (free ns and gamma) for one grid pixel; returns (RA, DEC, TS, ns, gamma)."""
+    i = index // Npix
+    j = index % Npix
+    print(i, j)
+    source = PointLikeSource(
+        ra=np.deg2rad(RA_grid[i]), dec=np.deg2rad(DEC_grid[j])
+    )
+    ana = create_analysis(cfg=cfg, datasets=datasets, source=source)
+    events_list = [data.exp for data in ana.data_list]
+    ana.initialize_trial(events_list)
+    (log_lambda_max, fitparam_values, status) = ana.llhratio.maximize(rss)
+    (ts, x, status) = ana.unblind(minimizer_rss=rss)
+    return RA_grid[i], DEC_grid[j], ts, x["ns"], x["gamma"]
+
+def process_pixel_fixed_slope(index):
+    """Profile the likelihood in ns at the fixed slope; returns (max TS, best-fit ns)."""
+    i = index // Npix
+    j = index % Npix
+    print(i, j)
+    source = PointLikeSource(
+        ra=np.deg2rad(RA_grid[i]), dec=np.deg2rad(DEC_grid[j])
+    )
+    ana = create_analysis(cfg=cfg, datasets=datasets, source=source)
+    events_list = [data.exp for data in ana.data_list]
+    ana.initialize_trial(events_list)
+    TS_profile = []
+    counts = np.linspace(0, 200, 200)
+    for n in counts:
+        # profile the likelihood at the user-selected fixed spectral slope
+        TS_profile.append(2 * ana.llhratio.evaluate([n, Slope])[0])
+    return max(TS_profile), counts[np.argmax(TS_profile)]
+
+pr = ProgressReporter()
+pr.report_progress(stage="Progress", progress=5.0)
+
+tsbest = 0
+ibest = 0
+jbest = 0
+if TSmap_type == "Fixed_slope":
+    for i, RRa in enumerate(RA_grid):
+        pr.report_progress(
+            stage="Progress", progress=5 + 95 * (i / len(RA_grid))
+        )
+        for j, DDec in enumerate(DEC_grid):
+            ind = i * Npix + j
+            TS_map[i, j], ns_map[i, j] = process_pixel_fixed_slope(ind)
+            if TS_map[i, j] > tsbest:
+                tsbest = TS_map[i, j]
+                ibest = i
+                jbest = j
+                print(RRa, DDec, tsbest)
+else:
+    ncpu = int(os.cpu_count())  # available for a parallel scan (cf. scan_pixels_parallel below)
+    # with mp.Pool(3) as pool:
+    #     res = pool.map(process_pixel, range(Npix**2))
+    tsbest = 0
+    for i, RRa in enumerate(RA_grid):
+        pr.report_progress(
+            stage="Progress", progress=5 + 95 * (i / len(RA_grid))
+        )
+        for j, DDec in enumerate(DEC_grid):
+            ind = i * Npix + j
+            r, d, TS_map[i, j], ns_map[i, j], gamma_map[i, j] = process_pixel(
+                ind
+            )
+            if TS_map[i, j] > tsbest:
+                tsbest = TS_map[i, j]
+                ibest = i
+                jbest = j
+                print(RRa, DDec, tsbest)
+
+plt.imshow(
+    np.flip(np.transpose(TS_map), axis=1),
+    extent=(
+        max(RA_grid) + pixel_size / cdec / 2.0,
+        min(RA_grid) - pixel_size / cdec / 2.0,
+        min(DEC_grid) - pixel_size / 2.0,
+        max(DEC_grid) + pixel_size / 2.0,
+    ),
+    origin="lower",
+    aspect=1 / cdec,
+)
+plt.colorbar(label="TS")
+# plt.scatter([RA_grid[ibest]],[DEC_grid[jbest]],marker='x',color='white')
+plt.scatter([RA], [DEC], marker="x", color="white")
+plt.text(RA, DEC + 0.5 * pixel_size, src_name, color="white")
+plt.xlabel("Right Ascension, degrees")
+plt.ylabel("Declination, degrees")
+print(max(RA_grid), min(RA_grid), min(DEC_grid), max(DEC_grid))
+
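+# A minimal sketch of the parallel scan hinted at by the commented-out
+# mp.Pool lines above, assuming the `multiprocess` package from the tool
+# requirements; it is defined but never called, so the sequential scan above
+# remains the code path that actually runs.
+def scan_pixels_parallel(nproc=2):
+    import multiprocess as mp  # dill-based fork of multiprocessing
+
+    with mp.Pool(nproc) as pool:
+        # one (RA, DEC, TS, ns, gamma) tuple per pixel
+        return pool.map(process_pixel, range(Npix**2))
+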
+# Create a new WCS object. The number of axes must be set from the start.
+w = wcs.WCS(naxis=2)
+
+w.wcs.ctype = ["RA---CAR", "DEC--CAR"]
+# We need a plate carree (CAR) projection, since the histogram is binned in RA-Dec.
+# The peculiarity here is that the CAR projection produces a rectilinear grid only
+# if CRVAL2 == 0. We also follow the convention of RA increasing from right to left
+# (CDELT1 < 0, so the input image is flipped); otherwise aladin-lite does not show it.
+w.wcs.crval = [RA, 0]
+w.wcs.crpix = [Npix / 2.0 + 0.5, 1 - DEC_grid[0] / pixel_size]
+w.wcs.cdelt = np.array([-pixel_size / cdec, pixel_size])
+
+header = w.to_header()
+
+hdu = fits.PrimaryHDU(np.flip(TS_map.T, axis=1), header=header)
+hdu.writeto("Image.fits", overwrite=True)
+hdu = fits.open("Image.fits")
+im = hdu[0].data
+wcs1 = wcs.WCS(hdu[0].header)
+plt.figure()  # fresh figure for the WCS-projected image
+ax = plt.subplot(projection=wcs1)
+lon = ax.coords["ra"]
+lon.set_major_formatter("d.dd")
+lat = ax.coords["dec"]
+lat.set_major_formatter("d.dd")
+plt.imshow(im, origin="lower")
+plt.colorbar(label="TS")
+
+plt.scatter(
+    [RA], [DEC], marker="x", color="white", transform=ax.get_transform("world")
+)
+plt.text(
+    RA,
+    DEC + 0.5 * pixel_size,
+    src_name,
+    color="white",
+    transform=ax.get_transform("world"),
+)
+
+plt.grid(color="white", ls="solid")
+plt.xlabel("RA")
+plt.ylabel("Dec")
+plt.savefig("Image.png", format="png", bbox_inches="tight")
+
+fits_image = ImageDataProduct.from_fits_file("Image.fits")
+
+bin_image = PictureProduct.from_file("Image.png")
+
+get_ipython().system(" rm -rfv {data_dir}")  # noqa: F821
+
+png = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+fits = fits_image  # http://odahub.io/ontology#Image (shadows astropy.io.fits, which is no longer needed below)
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(("out_Image_png", "png_galaxy.output", png))
+_oda_outs.append(("out_Image_fits", "fits_galaxy.output", fits))
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
diff --git a/tools/icecube/Lightcurve.py b/tools/icecube/Lightcurve.py
new file mode 100644
index 00000000..47e73f7d
--- /dev/null
+++ b/tools/icecube/Lightcurve.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import numpy as np
+import scipy.stats
+from astropy.time import Time
+from matplotlib import pyplot as plt
+from oda_api.data_products import ODAAstropyTable, PictureProduct
+from oda_api.json import CustomJSONEncoder
+from skyllh.analyses.i3.publicdata_ps.time_dependent_ps import create_analysis
+from skyllh.core.config import Config
+from skyllh.core.random import RandomStateService
+
+# from skyllh.analyses.i3.publicdata_ps.time_integrated_ps import create_analysis
+from skyllh.core.source_model import PointLikeSource
+from skyllh.datasets.i3.PublicData_10y_ps import create_dataset_collection
+
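+# Lightcurve strategy: the ten-year sample is split into the IceCube
+# observing periods; for each period a time-dependent point-source analysis
+# is run with a box time window covering that period, at fixed spectral slope.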
+# src_name='NGC 1068' #http://odahub.io/ontology#AstrophysicalObject
+RA = 40.669622  # http://odahub.io/ontology#PointOfInterestRA
+DEC = 0.013294  # http://odahub.io/ontology#PointOfInterestDEC
+Slope = 3.0  # http://odahub.io/ontology#Float
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic:
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+if not os.path.exists("20210126_PS-IC40-IC86_VII.zip"):
+    get_ipython().system(  # noqa: F821
+        "wget http://icecube.wisc.edu/data-releases/20210126_PS-IC40-IC86_VII.zip"
+    )
+    get_ipython().system("unzip 20210126_PS-IC40-IC86_VII.zip")  # noqa: F821
+
+data_dir = os.getcwd() + "/icecube_10year_ps/"
+
+cfg = Config()
+
+dsc = create_dataset_collection(cfg=cfg, base_path=data_dir)
+periods = [
+    "IC40",
+    "IC59",
+    "IC79",
+    "IC86_I",
+    "IC86_II",
+    "IC86_III",
+    "IC86_IV",
+    "IC86_V",
+    "IC86_VI",
+    "IC86_VII",
+]
+
+# Start/stop times of the IceCube observing periods.
+Tstarts = [
+    "2008-04-06T00:00:00.0",
+    "2009-05-20T00:00:00.0",
+    "2010-06-01T00:00:00.0",
+    "2011-05-13T00:00:00.0",
+    "2012-04-26T00:00:00.0",
+    "2013-05-02T00:00:00.0",
+    "2014-04-10T00:00:00.0",
+    "2015-04-24T00:00:00.0",
+    "2016-05-20T00:00:00.0",
+    "2017-05-18T00:00:00.0",
+]
+Tstops = [
+    "2009-05-20T00:00:00.0",
+    "2010-05-31T00:00:00.0",
+    "2011-05-13T00:00:00.0",
+    "2012-05-15T00:00:00.0",
+    "2013-05-02T00:00:00.0",
+    "2014-05-06T00:00:00.0",
+    "2015-05-18T00:00:00.0",
+    "2016-05-20T00:00:00.0",
+    "2017-05-18T00:00:00.0",
+    "2018-07-08T00:00:00.0",
+]
+Tstarts = Time(Tstarts, format="isot", scale="utc").mjd
+Tstops = Time(Tstops, format="isot", scale="utc").mjd
+
+source = PointLikeSource(ra=np.deg2rad(RA), dec=np.deg2rad(DEC))
+
+chi2_68_quantile = scipy.stats.chi2.ppf(0.68, df=1)
+chi2_90_quantile = scipy.stats.chi2.ppf(0.90, df=1)
+chi2_95_quantile = scipy.stats.chi2.ppf(0.95, df=1)
+
+Fmin_68 = np.zeros(len(Tstarts))
+Fmax_68 = np.zeros(len(Tstarts))
+Fmax_90 = np.zeros(len(Tstarts))
+Fbest = np.zeros(len(Tstarts))
+for ind in range(len(Tstarts)):
+    print(periods[ind])
+    datasets = dsc[periods[ind],]  # tuple index selects a single-period collection
+    ana = create_analysis(
+        cfg=cfg,
+        datasets=datasets,
+        source=source,
+        refplflux_gamma=2.0,
+        box={"start": Tstarts[ind], "stop": Tstops[ind]},
+    )
+    events_list = [data.exp for data in ana.data_list]
+    ana.initialize_trial(events_list)
+    rss = RandomStateService(seed=1)
+    (ts, x, status) = ana.unblind(minimizer_rss=rss)
+    print(f"TS = {ts:.3f}")
+    print(f'ns = {x["ns"]:.2f}')
+    print(f'gamma = {x["gamma"]:.2f}')
+    TS_profile = []
+    counts = np.linspace(0, 200, 200)
+    for n in counts:
+        TS_profile.append(2 * ana.llhratio.evaluate([n, Slope])[0])
+    TS_profile = np.array(TS_profile)
+    plt.plot(counts, TS_profile)
+    tsmax = max(TS_profile)
+    cbest = counts[np.argmax(TS_profile)]
+    print(max(TS_profile), cbest)
+    mask = TS_profile > tsmax - chi2_68_quantile
+    cmin = min(counts[mask])
+    cmax = max(counts[mask])
+    print(cmin, cmax)
+    # Convert counts to flux normalisation at the 1 TeV pivot: the factor 1e3
+    # converts 1/GeV to 1/TeV and the factor 3 sums the three neutrino
+    # flavours (cf. the axis label below).
+    Fmin_68[ind] = (
+        ana.calculate_fluxmodel_scaling_factor(cmin, [cmin, Slope]) * 1e3 * 3
+    )  # all-flavour E^2 dN/dE at 1 TeV, TeV/(cm^2 s)
+    Fmax_68[ind] = (
+        ana.calculate_fluxmodel_scaling_factor(cmax, [cmin, Slope]) * 1e3 * 3
+    )
+    Fbest[ind] = (
+        ana.calculate_fluxmodel_scaling_factor(cbest, [cmin, Slope]) * 1e3 * 3
+    )
+    mask = TS_profile > tsmax - chi2_90_quantile
+    cmin = min(counts[mask])
+    cmax = max(counts[mask])
+    Fmax_90[ind] = (
+        ana.calculate_fluxmodel_scaling_factor(cmax, [cmin, Slope]) * 1e3 * 3
+    )
+    print("All-flavour:", Fmin_68[ind], Fmax_68[ind], Fbest[ind], Fmax_90[ind])
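+
+# The 68%/90% bounds above follow Wilks' theorem: the interval is the set of
+# counts where the TS profile stays within a chi^2 quantile of its maximum.
+# A small self-contained restatement of that logic (not called by the script):
+def ts_interval(counts, TS_profile, quantile):
+    """Return (lower, upper) bounds of counts with TS > max(TS) - quantile."""
+    TS_profile = np.asarray(TS_profile)
+    mask = TS_profile > TS_profile.max() - quantile
+    return counts[mask].min(), counts[mask].max()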
+
+Tmeans = (Tstarts + Tstops) / 2.0
+Tdels = Tstops - Tstarts
+plt.figure()  # fresh figure: the TS-profile diagnostic curves share the previous one
+plt.errorbar(
+    Tmeans,
+    Fbest,
+    yerr=[Fbest - Fmin_68, Fmax_68 - Fbest],
+    xerr=Tdels / 2.0,
+    color="black",
+    linewidth=2,
+)
+# 90% upper limits drawn as downward arrows (uplims); yerr only sets the arrow length.
+plt.errorbar(
+    Tmeans,
+    Fmax_90,
+    xerr=Tdels / 2.0,
+    yerr=Fmax_90 / 5.0,
+    uplims=np.ones(len(Tmeans)),
+    linestyle="none",
+    color="black",
+    alpha=0.5,
+    linewidth=2,
+)
+plt.xlabel("Time, MJD")
+plt.ylabel(r"$E^2dN/dE$ at 1 TeV [TeV/(cm$^2$ s)], all flavours")
+plt.savefig("Lightcurve.png", format="png", bbox_inches="tight")
+
+bin_image = PictureProduct.from_file("Lightcurve.png")
+from astropy.table import Table
+
+data = [Tstarts, Tstops, Fbest, Fmin_68, Fmax_68, Fmax_90]
+names = (
+    "Tstart[MJD]",
+    "Tstop[MJD]",
+    "F[TeV/cm2s]",
+    "F_min_68[TeV/cm2s]",
+    "F_max_68[TeV/cm2s]",
+    "F_max_90[TeV/cm2s]",
+)
+lightcurve = ODAAstropyTable(Table(data, names=names))
+
+get_ipython().system(" rm -rfv {data_dir}")  # noqa: F821
+
+lightcurve_png = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+lightcurve_table = lightcurve  # http://odahub.io/ontology#ODAAstropyTable
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(
+    (
+        "out_Lightcurve_lightcurve_png",
+        "lightcurve_png_galaxy.output",
+        lightcurve_png,
+    )
+)
+_oda_outs.append(
+    (
+        "out_Lightcurve_lightcurve_table",
+        "lightcurve_table_galaxy.output",
+        lightcurve_table,
+    )
+)
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
diff --git a/tools/icecube/Spectrum.py b/tools/icecube/Spectrum.py
new file mode 100644
index 00000000..66a1fc1d
--- /dev/null
+++ b/tools/icecube/Spectrum.py
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+import numpy as np
+import scipy.stats
+from matplotlib import pyplot as plt
+from oda_api.api import ProgressReporter
+from oda_api.data_products import ODAAstropyTable, PictureProduct
+from oda_api.json import CustomJSONEncoder
+
+src_name = "NGC 1068"  # http://odahub.io/ontology#AstrophysicalObject
+RA = 40.669622  # http://odahub.io/ontology#PointOfInterestRA
+DEC = 0.013294  # http://odahub.io/ontology#PointOfInterestDEC
+IC40 = False  # http://odahub.io/ontology#Boolean
+IC59 = False  # http://odahub.io/ontology#Boolean
+IC79 = False  # http://odahub.io/ontology#Boolean
+IC86_I = True  # http://odahub.io/ontology#Boolean
+IC86_II_VII = True  # http://odahub.io/ontology#Boolean
+Spectrum_type = "Free_slope"  # http://odahub.io/ontology#String ; oda:allowed_value "Fixed_slope","Free_slope"
+Slope = 3.0  # http://odahub.io/ontology#Float
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic:
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+from skyllh.analyses.i3.publicdata_ps.time_integrated_ps import create_analysis
+from skyllh.core.config import Config
+from skyllh.core.random import RandomStateService
+from skyllh.core.source_model import PointLikeSource
+from skyllh.datasets.i3.PublicData_10y_ps import create_dataset_collection
+
+periods = []
+if IC40:
+    periods.append("IC40")
+if IC59:
+    periods.append("IC59")
+if IC79:
+    periods.append("IC79")
+if IC86_I:
+    periods.append("IC86_I")
+if IC86_II_VII:
+    periods.append("IC86_II-VII")
+
+cfg = Config()
+
+if not os.path.exists("20210126_PS-IC40-IC86_VII.zip"):
+    get_ipython().system(  # noqa: F821
+        "wget http://icecube.wisc.edu/data-releases/20210126_PS-IC40-IC86_VII.zip"
+    )
+    get_ipython().system("unzip 20210126_PS-IC40-IC86_VII.zip")  # noqa: F821
+
+data_dir = os.getcwd() + "/icecube_10year_ps/"
+
+dsc = create_dataset_collection(cfg=cfg, base_path=data_dir)
+
+datasets = dsc[periods]
+
+source = PointLikeSource(ra=np.deg2rad(RA), dec=np.deg2rad(DEC))
+
+ana = create_analysis(cfg=cfg, datasets=datasets, source=source)
+
+events_list = [data.exp for data in ana.data_list]
+ana.initialize_trial(events_list)
+
+pr = ProgressReporter()
+pr.report_progress(stage="Progress", progress=5.0)
+
+rss = RandomStateService(seed=1)
+(ts, x, status) = ana.unblind(minimizer_rss=rss)
+
+print(f"TS = {ts:.3f}")
+print(f'ns = {x["ns"]:.2f}')
+print(f'gamma = {x["gamma"]:.2f}')
+
+if Spectrum_type == "Fixed_slope":
+    df = 1
+    chi2_68_quantile = scipy.stats.chi2.ppf(0.68, df=df)
+    chi2_90_quantile = scipy.stats.chi2.ppf(0.90, df=df)
+    chi2_95_quantile = scipy.stats.chi2.ppf(0.95, df=df)
+    TS_profile = []
+    counts = np.linspace(0, 200, 200)
+    for n in counts:
+        TS_profile.append(2 * ana.llhratio.evaluate([n, Slope])[0])
+    TS_profile = np.array(TS_profile)
+    # plt.plot(counts,TS_profile)
+    tsmax = max(TS_profile)
+    print(max(TS_profile))
+    cbest = counts[np.argmax(TS_profile)]
+    mask = TS_profile > tsmax - chi2_68_quantile
+    cmin = min(counts[mask])
+    cmax = max(counts[mask])
+    print(cmin, cmax)
+    Fmin_68 = ana.calculate_fluxmodel_scaling_factor(
+        cmin, [cmin, Slope]
+    )  # 1/(GeV s cm^2 sr) at 1e3 GeV
+    Fmax_68 = ana.calculate_fluxmodel_scaling_factor(
+        cmax, [cmin, Slope]
+    )  # 1/(GeV s cm^2 sr) at 1e3 GeV
+    mask = TS_profile > tsmax - chi2_90_quantile
+    cmin = min(counts[mask])
+    cmax = max(counts[mask])
+    Fmin_90 = ana.calculate_fluxmodel_scaling_factor(
+        cmin, [cmin, Slope]
+    )  # 1/(GeV s cm^2 sr) at 1e3 GeV
+    Fmax_90 = ana.calculate_fluxmodel_scaling_factor(
+        cmax, [cmin, Slope]
+    )  # 1/(GeV s cm^2 sr) at 1e3 GeV
+    Fbest = ana.calculate_fluxmodel_scaling_factor(
+        cbest, [cmin, Slope]
+    )  # 1/(GeV s cm^2 sr) at 1e3 GeV
+    x = np.logspace(0, 2, 10)  # energy in TeV
+    ymin_68 = (
+        3 * Fmin_68 * (x / 1) ** (2 - Slope) * 1e3
+    )  # in TeV/cm2s, all flavours
+    ymax_68 = 3 * Fmax_68 * (x / 1) ** (2 - Slope) * 1e3
+    ymax_90 = 3 * Fmax_90 * (x / 1) ** (2 - Slope) * 1e3
+    ybest = 3 * Fbest * (x / 1) ** (2 - Slope) * 1e3
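+    # For dN/dE ~ E^-Slope normalised at 1 TeV, E^2 dN/dE scales as
+    # E^(2 - Slope); the factor 1e3 converts the 1/GeV normalisation to
+    # 1/TeV at the pivot and the factor 3 sums the three neutrino flavours.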
+    if np.amax(TS_profile) > chi2_95_quantile:
+        plt.fill_between(x, ymin_68, ymax_68, alpha=0.5, label="68% error")
+        plt.plot(x, ybest, color="black")
+    plt.plot(x, ymax_90, color="black", linewidth=4, label="90% UL")
+    plt.annotate(
+        "",
+        xy=(x[4], ymax_90[4] / 2.0),
+        xytext=(x[4], ymax_90[4]),
+        arrowprops=dict(facecolor="black", shrink=0.05),
+    )
+
+    plt.xscale("log")
+    plt.yscale("log")
+    plt.xlabel("$E$, TeV")
+    # plt.ylim(1e-14,3e-10)
+    plt.ylabel("$E^2 dN/dE$, TeV/(cm$^2$s), all flavours")
+    plt.savefig("Spectrum.png", format="png", bbox_inches="tight")
+
+if Spectrum_type == "Free_slope":
+    df = 2
+    chi2_68_quantile = scipy.stats.chi2.ppf(0.68, df=df)
+    chi2_90_quantile = scipy.stats.chi2.ppf(0.90, df=df)
+    chi2_95_quantile = scipy.stats.chi2.ppf(0.95, df=df)
+    TS_map = np.zeros((50, 100))
+    counts = np.linspace(0, 200, 100)
+    Slopes = np.linspace(1, 5, 50)
+    tsbest = 0
+    slope_best = 0
+    n_best = 0
+    c_ul = np.zeros(len(Slopes))
+    for i, Slope in enumerate(Slopes):
+        pr.report_progress(
+            stage="Progress", progress=5 + 95.0 * (i / len(Slopes))
+        )
+        for j, n in enumerate(counts):
+            TS_map[i, j] = 2 * ana.llhratio.evaluate([n, Slope])[0]
+            if TS_map[i, j] > tsbest:
+                tsbest = TS_map[i, j]
+                slope_best = Slope
+                n_best = n
+        tsmax = np.max(TS_map[i, :])
+        mask_90 = TS_map[i, :] > tsmax - chi2_90_quantile
+        c_ul[i] = max(counts[mask_90])
+    plt.plot(Slopes, c_ul)
+    # plt.imshow(np.transpose(TS_map),origin='lower',extent=[Slopes[0],Slopes[-1],counts[0],counts[-1]],aspect=0.03,vmin=0)
+    # plt.colorbar()
+    # plt.scatter(slope_best,n_best,marker='x',color='red')
+    print(Slope, n, tsbest, n_best, slope_best)
+    cnt_68 = plt.contour(
+        Slopes,
+        counts,
+        np.transpose(TS_map),
+        [tsbest - chi2_68_quantile],
+        colors="red",
+    )
+    cnt_90 = plt.contour(
+        Slopes,
+        counts,
+        np.transpose(TS_map),
+        [tsbest - chi2_90_quantile],
+        colors="red",
+    )
+    cont_68 = cnt_68.get_paths()[0].vertices
+    gammas_68 = cont_68[:, 0]
+    norms_68 = cont_68[:, 1]
+
+    cont_90 = cnt_90.get_paths()[0].vertices
+    gammas_90 = cont_90[:, 0]
+    norms_90 = cont_90[:, 1]
+
+    F_norms_68 = []
+    for i in range(len(norms_68)):
+        F_norms_68.append(
+            ana.calculate_fluxmodel_scaling_factor(
+                norms_68[i], [norms_68[i], gammas_68[i]]
+            )
+        )
+    Fbest = ana.calculate_fluxmodel_scaling_factor(
+        n_best, [n_best, slope_best]
+    )
+
+    F_norms_90 = []
+    for i in range(len(norms_90)):
+        F_norms_90.append(
+            ana.calculate_fluxmodel_scaling_factor(
+                norms_90[i], [norms_90[i], gammas_90[i]]
+            )
+        )
+    # plt.plot(gammas_90,F_norms_90)
+    # The per-slope 90% UL counts are converted to flux normalisations here;
+    # this overwrites the contour-based F_norms_90 above.
+    F_norms_90 = []
+    for i in range(len(Slopes)):
+        F_norms_90.append(
+            ana.calculate_fluxmodel_scaling_factor(
+                c_ul[i], [c_ul[i], Slopes[i]]
+            )
+        )
+
+    # plt.figure()
+    # plt.plot(Slopes,F_norms_90)
+    # print(F_norms_90)
+    # plt.plot(gammas_68,F_norms_68)
+    # plt.yscale('log')
+    # plt.ylabel('Flux normalisation at 1 TeV, 1/(GeV s cm$^2$)')
+    # plt.scatter([slope_best],[Fbest],marker='x')
+    # plt.xlabel('Slope')
+    # plt.savefig('Confidence_range.png',format='png',bbox_inches='tight')
+
+if Spectrum_type == "Free_slope":
+    plt.figure()  # fresh figure: the diagnostic c_ul / contour plots share the previous one
+    x = np.logspace(-1, 6, 50)
+
+    # 3.690e-15 (E/1000 GeV)^{-2.33} 1/(GeV s cm^2 sr)
+    ymax_68 = np.zeros(len(x))
+    ymin_68 = np.ones(len(x))
+    for i in range(len(gammas_68)):
+        y = (
+            3 * F_norms_68[i] * (x / 1.0) ** (-gammas_68[i]) * x**2 * 1e3
+        )  # TeV *(TeV/GeV)/cm2 s
+        ymax_68 = np.maximum(ymax_68, y)
+        ymin_68 = np.minimum(ymin_68, y)
+
+    ymax_90 = np.zeros(len(x))
+    for i in range(len(Slopes)):
+        y = (
+            3 * F_norms_90[i] * (x / 1.0) ** (-Slopes[i]) * x**2 * 1e3
+        )  # TeV *(TeV/GeV)/cm2 s
+        ymax_90 = np.maximum(ymax_90, y)
+        # plt.plot(x,y)
+
+    ybest = 3 * Fbest * (x / 1.0) ** (-slope_best) * x**2 * 1e3
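+    # ymax_90 is the upper envelope over all trial slopes: for each slope the
+    # maximal allowed power law is computed above, and np.maximum keeps the
+    # tangent envelope that is drawn below as the 90% UL.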
+    if np.amax(TS_map) > chi2_95_quantile:
+        plt.fill_between(
+            x, ymin_68, ymax_68, alpha=0.5, label=src_name + " 68% error"
+        )
+        plt.plot(x, ybest, color="black")
+    plt.plot(
+        x, ymax_90, color="black", linewidth=2, label=src_name + " 90% UL"
+    )
+    plt.annotate(
+        "",
+        xy=(x[24], ymax_90[24] / 4.0),
+        xytext=(x[24], ymax_90[24]),
+        arrowprops=dict(facecolor="black", shrink=0.05, width=2, headwidth=8),
+    )
+
+    plt.xscale("log")
+    plt.yscale("log")
+    plt.legend(loc="upper right")
+    plt.xlabel("$E$, TeV")
+    plt.ylabel("$E^2 dN/dE$, TeV/(cm$^2$s), all flavours")
+    plt.ylim(1e-14, 3e-9)
+    plt.xlim(1e-1, 1e6)
+    plt.savefig("Spectrum.png", format="png", bbox_inches="tight")
+
+bin_image = PictureProduct.from_file("Spectrum.png")
+from astropy.table import Table
+
+data = [x, ybest, ymin_68, ymax_68, ymax_90]
+names = (
+    "Energy[TeV]",
+    "F_best[TeV/cm2s]",
+    "F_min_68[TeV/cm2s]",
+    "F_max_68[TeV/cm2s]",
+    "F_max_90[TeV/cm2s]",
+)
+spec_params = ODAAstropyTable(Table(data, names=names))
+
+get_ipython().system(" rm -rfv {data_dir}")  # noqa: F821
+
+png = bin_image  # http://odahub.io/ontology#ODAPictureProduct
+table = spec_params  # http://odahub.io/ontology#ODAAstropyTable
+
+# output gathering
+_galaxy_meta_data = {}
+_oda_outs = []
+_oda_outs.append(("out_Spectrum_png", "png_galaxy.output", png))
+_oda_outs.append(("out_Spectrum_table", "table_galaxy.output", table))
+
+for _outn, _outfn, _outv in _oda_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif getattr(_outv, "write_fits_file", None):
+        _outv.write_fits_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "fits"}
+    elif getattr(_outv, "write_file", None):
+        _outv.write_file(_galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    else:
+        with open(_galaxy_outfile_name, "w") as fd:
+            json.dump(_outv, fd, cls=CustomJSONEncoder)
+        _galaxy_meta_data[_outn] = {"ext": "json"}
+
+with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd:
+    json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
diff --git a/tools/icecube/icecube_astro_tool.xml b/tools/icecube/icecube_astro_tool.xml
new file mode 100644
index 00000000..daa08865
--- /dev/null
+++ b/tools/icecube/icecube_astro_tool.xml
@@ -0,0 +1,189 @@
+
+
+unzip
+ipython
+astropy
+gammapy
+matplotlib
+astroquery
+scipy
+oda-api
+nbconvert
+wget
+pandas
+numpy
+iminuit
+skyllh
+multiprocess
+
+
+ipython '$__tool_directory__/${_data_product._selector}.py'
+
+
+_data_product['_selector'] == 'Image'
+
+
+_data_product['_selector'] == 'Image'
+
+
+_data_product['_selector'] == 'Spectrum'
+
+
+_data_product['_selector'] == 'Spectrum'
+
+
+_data_product['_selector'] == 'Lightcurve'
+
+
+_data_product['_selector'] == 'Lightcurve'
+
+
+This service provides analysis of the publicly available data of the
+IceCube neutrino telescope, described in `IceCube Collaboration
+(2021) <https://arxiv.org/abs/2101.09836>`__, using the
+`SkyLLH <https://icecube.github.io/skyllh/master/html/index.html>`__
+data analysis package. Three types of data products are generated: sky
+images, source lightcurves and spectra.
+
+The image produced is a map of the Test Statistic (TS) value around the
+reference source position. The TS value is computed for a power-law
+spectral model of a source placed at the center of each pixel.
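+
+For orientation, each pixel's TS is obtained roughly as in the following
+sketch (a simplified excerpt of ``Image.py``; ``ra_pix``/``dec_pix`` stand
+for the pixel centre, the other names follow the script)::
+
+    source = PointLikeSource(ra=np.deg2rad(ra_pix), dec=np.deg2rad(dec_pix))
+    ana = create_analysis(cfg=cfg, datasets=datasets, source=source)
+    ana.initialize_trial([data.exp for data in ana.data_list])
+    (ts, fitparams, status) = ana.unblind(minimizer_rss=RandomStateService(seed=1))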
+It is possible to either fix the slope of the spectrum or leave it free,
+using the ``Fixed_slope`` / ``Free_slope`` switch. For the ``Fixed_slope``
+choice, the spectral slope is set via the ``Slope`` parameter. The time
+interval of the analysis can be adjusted by including or excluding the
+fixed observation periods IC40, IC59, IC79, IC86-I and IC86-II-VII into
+which the IceCube dataset is divided (see `IceCube Collaboration
+(2021) <https://arxiv.org/abs/2101.09836>`__ for the time bounds of the
+periods, between 2008 and 2018).
+
+As for the image, the spectrum can be requested either for a fixed
+spectral slope, or leaving the slope free in the spectral fitting, using
+the ``Fixed_slope`` / ``Free_slope`` switch. For a fixed spectral slope,
+the analysis builds a likelihood profile to find the best-fit number of
+counts from the source and the error intervals (the 68% interval is
+bounded by the points where the TS value decreases by 1 with respect to
+the maximum; the 90% upper bound is the upper boundary of the interval
+in which the TS value decreases by 2.7). The counts are converted to the
+flux normalisation using the *calculate_fluxmodel_scaling_factor*
+function of SkyLLH. In the case of the ``Free_slope`` choice, TS values
+are calculated as a function of the source counts and spectral slope,
+and 68% confidence contours are defined as the level at which the TS
+decreases by 2.3 with respect to the maximal value. If the maximal TS
+value exceeds 6, the best-fit spectrum is plotted together with the 68%
+confidence range “butterfly”. Otherwise, an upper limit on power-law
+spectra is shown: a curve tangent to the maximal possible power-law
+spectrum (at 90% confidence level) for each spectral slope in the range
+between 1 and 5.
+
+For the lightcurve, the entire ten-year time span of the IceCube public
+data is divided into intervals corresponding to the IceCube observation
+periods (from IC40 to IC86-VII). For each period, the source flux is
+extracted assuming a fixed spectral slope.
+
+
+10.21234/CPKQ-K003
+
\ No newline at end of file