
Another dev

Merged Mikhail Smirnov requested to merge another-dev into main
2 files changed: +81 −10
@@ -5,12 +5,14 @@ import awkward as ak
import pandas as pd
import uproot as ur
from km3io import OfflineReader
from irf_tools import aeff_2D
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from astropy.io import fits
import astropy.units as u
# from gammapy.irf import EnergyDispersion2D
@@ -33,7 +35,10 @@ def build_aeff(
    input=(file_nu, file_nubar),
    no_bdt=False,
    cuts=True,
    weight_factor=-2.5,
    cos_theta_binE=np.linspace(1, -1, 13),
    energy_binE=np.logspace(2, 8, 49),
    output="aeff.fits"
):
"""
Create Aeff .fits from dist files
@@ -46,6 +51,12 @@ def build_aeff(
    weight_factor: re-weight data, default value -2.5
    cos_theta_binE: numpy array of linear bins for cos of zenith angle theta
    energy_binE: logarithmic numpy array of energy bins
    output: name of the generated Aeff file, with extension .fits
    """
    # Read data files using km3io
@@ -63,18 +74,42 @@ def build_aeff(
    if cuts:
        mask_nu = get_cut_mask(df_nu.bdt0, df_nu.bdt1, df_nu.dir_z)
        df_nu = df_nu[mask_nu].copy()
        mask_nubar = get_cut_mask(df_nubar.bdt0, df_nubar.bdt1, df_nubar.dir_z)
        df_nubar = df_nubar[mask_nubar].copy()

    # calculate the normalized weight factor for each event
    weights = dict()
    for l, df in zip(['nu', 'nubar'], [df_nu, df_nubar]):
        weights[l] = (df.energy_mc**(weight_factor - alpha_value)).to_numpy()
        weights[l] *= len(df) / weights[l].sum()
    # Create DataFrames with neutrinos and anti-neutrinos
    df_nu_all = pd.concat([df_nu, df_nubar], ignore_index=True)

    # Also create a concatenated array for the weights
    weights_all = np.concatenate([weights['nu'], weights['nubar']])

    # Define bins for Aeff
    # cos_bins_fine = np.linspace(1, -1, 13)
    theta_binE = np.arccos(cos_theta_binE)
    # energy_binE = np.logspace(2, 8, 49)

    # Bin centers
    energy_binC = np.sqrt(energy_binE[:-1] * energy_binE[1:])
    theta_binC = np.arccos(0.5 * (cos_theta_binE[:-1] + cos_theta_binE[1:]))

    # Fill histograms for effective area
    aeff_all = aeff_2D(
        e_bins=energy_binE,
        t_bins=theta_binE,
        dataset=df_nu_all,
        gamma=(-weight_factor),
        nevents=f_nu_km3io.header.genvol.numberOfEvents + f_nubar_km3io.header.genvol.numberOfEvents,
    ) * 2  # two building blocks

    new_aeff_file = WriteAeff(energy_binC, energy_binE, theta_binC, theta_binE, aeff_hist=aeff_all)
    new_aeff_file.to_fits(file_name=output)
def unpack_data(no_bdt, type, km3io_file, uproot_file):
"""
@@ -140,3 +175,36 @@ def get_cut_mask(bdt0, bdt1, dir_z):
    strong_horizontal = (np.arccos(dir_z) * 180 / np.pi > 80) & (bdt1 > 0.7)  # apply strong cut on horizontal events
    return mask_down & (clear_signal | loose_up | strong_horizontal)
# Class for writing aeff_2D to fits files
class WriteAeff:
    def __init__(self, energy_binC, energy_binE, theta_binC, theta_binE, aeff_hist):
        self.col1 = fits.Column(name='ENERG_LO', format='{}E'.format(len(energy_binC)), unit='GeV', array=[energy_binE[:-1]])
        self.col2 = fits.Column(name='ENERG_HI', format='{}E'.format(len(energy_binC)), unit='GeV', array=[energy_binE[1:]])
        self.col3 = fits.Column(name='THETA_LO', format='{}E'.format(len(theta_binC)), unit='rad', array=[theta_binE[:-1]])
        self.col4 = fits.Column(name='THETA_HI', format='{}E'.format(len(theta_binC)), unit='rad', array=[theta_binE[1:]])
        self.col5 = fits.Column(name='EFFAREA', format='{}D'.format(len(energy_binC) * len(theta_binC)),
                                dim='({},{})'.format(len(energy_binC), len(theta_binC)), unit='m2', array=[aeff_hist])

    def to_fits(self, file_name):
        """
        write .fits file

        file_name: should have .fits extension
        """
        cols = fits.ColDefs([self.col1, self.col2, self.col3, self.col4, self.col5])
        hdu = fits.PrimaryHDU()
        hdu2 = fits.BinTableHDU.from_columns(cols)
        hdu2.header['EXTNAME'] = 'EFFECTIVE AREA'
        hdu2.header['HDUDOC'] = 'https://github.com/open-gamma-ray-astro/gamma-astro-data-formats'
        hdu2.header['HDUVERS'] = '0.2'
        hdu2.header['HDUCLASS'] = 'GADF'
        hdu2.header['HDUCLAS1'] = 'RESPONSE'
        hdu2.header['HDUCLAS2'] = 'EFF_AREA'
        hdu2.header['HDUCLAS3'] = 'FULL-ENCLOSURE'
        hdu2.header['HDUCLAS4'] = 'AEFF_2D'
        aeff_fits = fits.HDUList([hdu, hdu2])
        aeff_fits.writeto(file_name, overwrite=True)
        print(f'file {file_name} is written successfully!')
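
Usage note: the sketch below is illustrative only and is not part of the changed files. The call of the updated build_aeff is shown commented out with placeholder input paths (the real file names are not in this diff), and the inspection part only relies on the GADF-style AEFF_2D extension that WriteAeff.to_fits writes, read back with plain astropy.

import numpy as np
from astropy.io import fits

# Hypothetical call of the updated build_aeff; the two input paths are placeholders.
# build_aeff(
#     input=("nu_CC.root", "nubar_CC.root"),
#     cuts=True,
#     weight_factor=-2.5,
#     cos_theta_binE=np.linspace(1, -1, 13),
#     energy_binE=np.logspace(2, 8, 49),
#     output="aeff.fits",
# )

# Read back the effective-area table produced by WriteAeff.to_fits
with fits.open("aeff.fits") as hdul:
    aeff = hdul["EFFECTIVE AREA"]
    print(aeff.header["HDUCLAS4"])        # AEFF_2D
    energ_lo = aeff.data["ENERG_LO"][0]   # lower energy bin edges, GeV
    theta_lo = aeff.data["THETA_LO"][0]   # lower theta bin edges, rad
    effarea = aeff.data["EFFAREA"][0]     # effective area histogram, m2
    print(energ_lo.size, theta_lo.size, effarea.shape)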