Skip to content
Snippets Groups Projects
Commit ec3bbb5c authored by ViaFerrata
Browse files

- Cleaned up code

- Corrected run_id readout due to new data structure
- Added time_residual_vertex to readout
- Changed event track datatype from float32 to float64 for better precision with times
parent 0648c35b
Branches
Tags
No related merge requests found
......@@ -3,7 +3,6 @@
"""This utility code contains functions that read the raw MC .h5 files"""
import numpy as np
import km3pipe as kp
#from memory_profiler import profile
#import line_profiler # call with kernprof -l -v file.py args
......@@ -32,9 +31,6 @@ def get_time_residual_nu_interaction_mean_triggered_hits(time_interaction, hits_
t_mean_triggered = np.mean(hits_time_triggered, dtype=np.float64)
time_residual_vertex = t_mean_triggered - time_interaction
print(t_mean_triggered)
print(time_interaction)
return time_residual_vertex
......@@ -60,7 +56,7 @@ def get_event_data(event_blob, geo, do_mc_hits, use_calibrated_file, data_cuts,
# parse tracks [event_id, particle_type, energy, isCC, bjorkeny, dir_x/y/z, time]
event_id = event_blob['EventInfo'].event_id[0]
run_id = event_blob['EventInfo'].run_id[0]
run_id = event_blob['RawHeader'][0][0].astype('float32')
particle_type = event_blob['McTracks'][p].type
energy = event_blob['McTracks'][p].energy
is_cc = event_blob['McTracks'][p].is_cc
......@@ -68,6 +64,7 @@ def get_event_data(event_blob, geo, do_mc_hits, use_calibrated_file, data_cuts,
dir_x, dir_y, dir_z = event_blob['McTracks'][p].dir_x, event_blob['McTracks'][p].dir_y, event_blob['McTracks'][p].dir_z
time_track = event_blob['McTracks'][p].time # actually always 0 for primary neutrino, measured in MC time
vertex_pos_x, vertex_pos_y, vertex_pos_z = event_blob['McTracks'][p].pos_x, event_blob['McTracks'][p].pos_y, event_blob['McTracks'][p].pos_z
time_interaction = event_blob['McTracks'][p].time
# parse hits [x,y,z,time]
if do_mc_hits is True:
......@@ -83,18 +80,14 @@ def get_event_data(event_blob, geo, do_mc_hits, use_calibrated_file, data_cuts,
#hits = hits.triggered_hits # alternative, though it only works for the triggered condition!
pos_x, pos_y, pos_z = hits.pos_x.astype('float32'), hits.pos_y.astype('float32'), hits.pos_z.astype('float32')
hits_time = hits.time.astype('float32')
hits_time = hits.time.astype('float32') # enough for the hit times in KM3NeT
triggered = hits.triggered.astype('float32')
time_residual_vertex = get_time_residual_nu_interaction_mean_triggered_hits(time_interaction, hits_time, triggered)
# save collected information to arrays event_track and event_hits
event_track = np.array([event_id, particle_type, energy, is_cc, bjorkeny, dir_x, dir_y, dir_z, time_track,
run_id, vertex_pos_x, vertex_pos_y, vertex_pos_z], dtype=np.float32)
# time_residual_vertex = get_time_residual_nu_interaction_mean_triggered_hits(time_interaction, hits_time, triggered)
#
# # save collected information to arrays event_track and event_hits
# event_track = np.array([event_id, particle_type, energy, is_cc, bjorkeny, dir_x, dir_y, dir_z, time_track,
# run_id, vertex_pos_x, vertex_pos_y, vertex_pos_z, time_residual_vertex], dtype=np.float32)
run_id, vertex_pos_x, vertex_pos_y, vertex_pos_z, time_residual_vertex], dtype=np.float64)
ax = np.newaxis
event_hits = np.concatenate([pos_x[:, ax], pos_y[:, ax], pos_z[:, ax], hits_time[:, ax], triggered[:, ax]], axis=1)
......
......
......@@ -7,6 +7,7 @@ import sys
import warnings
#from memory_profiler import profile # for memory profiling, call with @profile; myfunc()
#import line_profiler # call with kernprof -l -v file.py args
import km3pipe as kp
import matplotlib as mpl
mpl.use('Agg')
from matplotlib.backends.backend_pdf import PdfPages
......@@ -99,7 +100,7 @@ def calculate_bin_edges(n_bins, geo_fix, fname_geo_limits, do4d):
z_bin_edges = np.linspace(geo_limits[0][3] - 4.665, geo_limits[1][3] + 4.665, num=n_bins[2] + 1) # Delta z = 9.329
# ORCA denser detector study
z_bin_edges = np.linspace(37.84 - 7.5, 292.84 + 7.5, num=n_bins[2] + 1) # 15m vertical, 18 DOMs
#z_bin_edges = np.linspace(37.84 - 7.5, 292.84 + 7.5, num=n_bins[2] + 1) # 15m vertical, 18 DOMs # TODO change
#z_bin_edges = np.linspace(37.84 - 6, 241.84 + 6, num=n_bins[2] + 1) # 12m vertical, 18 DOMs
#z_bin_edges = np.linspace(37.84 - 4.5, 190.84 + 4.5, num=n_bins[2] + 1) # 9m vertical, 18 DOMs
#z_bin_edges = np.linspace(37.84 - 3, 139.84 + 3, num=n_bins[2] + 1) # 6m vertical, 18 DOMs
......@@ -171,7 +172,8 @@ def main(n_bins, geo_fix=True, do2d=False, do2d_pdf=(False, 10), do3d=False, do4
# filter out all hit and track information belonging that to this event
event_hits, event_track = get_event_data(event_blob, geo, do_mc_hits, use_calibrated_file, data_cuts, do4d)
if event_track[2] < data_cuts['energy_lower_limit']: # Cutting events with energy < threshold (default=0)
if event_track[2] < data_cuts['energy_lower_limit'] or event_track[2] > data_cuts['energy_upper_limit']:
# Cutting events with energy < threshold (default=0) and with energy > threshold (default=200)
continue
# event_track: [event_id, particle_type, energy, isCC, bjorkeny, dir_x/y/z, time]
......@@ -212,19 +214,21 @@ def main(n_bins, geo_fix=True, do2d=False, do2d_pdf=(False, 10), do3d=False, do4
if folder != '': folder += '/'
if do4d[1] == 'channel_id':
store_histograms_as_hdf5(np.array(all_4d_to_4d_hists), np.array(mc_infos), 'Results/4dTo4d/h5/xyzt/' + folder + filename_output + '_xyzc.h5', compression=('gzip', 1))
store_histograms_as_hdf5(np.array(all_4d_to_4d_hists), np.array(mc_infos), 'Results/4dTo4d/h5/xyzc/' + folder + filename_output + '_xyzc.h5', compression=('gzip', 1))
else:
store_histograms_as_hdf5(np.array(all_4d_to_4d_hists), np.array(mc_infos), 'Results/4dTo4d/h5/xyzt/' + folder + filename_output + '_xyzt.h5', compression=('gzip', 1))
if __name__ == '__main__':
main(n_bins=(11,13,18,60), geo_fix=True, do2d=False, do2d_pdf=(False, 20), do3d=False, do4d=(True, 'time'),
timecut = ('trigger_cluster', 'tight_1'), do_mc_hits=False, use_calibrated_file=True,
data_cuts = {'triggered': False, 'energy_lower_limit': 0})
# main(n_bins=(11,13,18,31), do2d=False, do2d_pdf=(False, 100), do3d=False, do4d=(True, 'channel_id'),
# do_mc_hits=False, use_calibrated_file=True, data_cuts = {'triggered': False, 'energy_lower_limit': 0})
# main(n_bins=(11,18,50,31), do2d=False, do2d_pdf=(False, 100), do3d=False, do4d=(True, 'xzt-c'),
# do_mc_hits=False, use_calibrated_file=True, data_cuts = {'triggered': False, 'energy_lower_limit': 0})
# main(n_bins=(11,13,18,60), geo_fix=True, do2d=False, do2d_pdf=(False, 10), do3d=False, do4d=(True, 'time'),
# timecut = ('trigger_cluster', 'all'), do_mc_hits=False, use_calibrated_file=True,
# data_cuts = {'triggered': False, 'energy_lower_limit': 0, 'energy_upper_limit': 200})
main(n_bins=(11,13,18,60), geo_fix=True, do2d=False, do2d_pdf=(False, 10), do3d=False, do4d=(True, 'channel_id'),
timecut = ('trigger_cluster', 'all'), do_mc_hits=False, use_calibrated_file=True,
data_cuts = {'triggered': False, 'energy_lower_limit': 0, 'energy_upper_limit': 200})
......
......
......@@ -233,14 +233,6 @@ def compute_4d_to_4d_histograms(event_hits, x_bin_edges, y_bin_edges, z_bin_edge
range=((min(x_bin_edges),max(x_bin_edges)),(min(y_bin_edges),max(y_bin_edges)),
(min(z_bin_edges),max(z_bin_edges)),(np.amin(channel_id), np.amax(channel_id))))
elif do4d[1] == 'xzt-c':
time = event_hits[:, 3]
event_hits = event_hits[np.logical_and(time >= t_start, time <= t_end)]
x, z, t, channel_id = event_hits[:, 0:1], event_hits[:, 2:3], event_hits[:, 3:4], event_hits[:, 5:6]
hist_4d = np.histogramdd(np.concatenate([x, z, t, channel_id], axis=1), bins=(x_bin_edges, z_bin_edges, n_bins[2], 31),
range=((min(x_bin_edges),max(x_bin_edges)),(min(z_bin_edges),max(z_bin_edges)),
(t_start, t_end),(np.amin(channel_id), np.amax(channel_id))))
else:
raise ValueError('The parameter in do4d[1] ' + str(do4d[1]) + ' is not available. Currently, only time and channel_id are supported.')
......
......
0% Loading — or loading failed. Please try again.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment