Commit 57b49bee authored by Johannes Schumann

Merge branch 'weights' into 'master'

Add w2 weights

See merge request !3
parents 484b1b9c a91f97fe
Pipeline #16239 passed with warnings
@@ -3,7 +3,7 @@
# Filename: config.py
# Author: Johannes Schumann <jschumann@km3net.de>
"""
Configuration for km3buu
"""
import os
@@ -30,6 +30,8 @@ log = get_logger(__name__)
GIBUU_SECTION = "GiBUU"
PROPOSAL_SECTION = "PROPOSAL"
GSEAGEN_SECTION = "gSeagen"
GSEAGEN_MEDIA_COMPOSITION_FILE = "MediaComposition.xml"
class Config(object):
@@ -95,7 +97,15 @@ class Config(object):
@proposal_itp_tables.setter
def proposal_itp_tables(self, value):
self.set(PROPOSAL_SECTION, "itp_table_path")
self.set(PROPOSAL_SECTION, "itp_table_path", value)
@property
def gseagen_path(self):
return self.get(GSEAGEN_SECTION, "path")
@gseagen_path.setter
def gseagen_path(self, value):
self.set(GSEAGEN_SECTION, "path", value)
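# Usage sketch for the new property (the module path follows the package's
# relative imports; the install prefix below is a hypothetical example):
#
#     from km3buu.config import Config
#     cfg = Config()
#     cfg.gseagen_path = "/opt/gseagen"
#     print(cfg.gseagen_path)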
def read_media_compositions(filename):
@@ -122,9 +132,15 @@ def read_media_compositions(filename):
elements[name] = (mendeleev.element(name), fraction)
attr = dict()
if "density" in media.attrib:
density = media.attrib["density"]
density = float(media.attrib["density"])
attr["density"] = density
attr["elements"] = elements
key = media.attrib["media"]
compositions[key] = attr
return compositions
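# Shape of the returned mapping, as assembled above (keys and values shown here
# are illustrative, only the structure is taken from the code):
#
#     compositions["SeaWater"] = {
#         "density": 1.04,  # only present if the <media> tag carries a density
#         "elements": {"O": (mendeleev.element("O"), 0.86), ...},  # (element, fraction)
#     }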
def read_default_media_compositions():
cfg = Config()
fpath = join(cfg.gseagen_path, "dat", GSEAGEN_MEDIA_COMPOSITION_FILE)
return read_media_compositions(fpath)
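# Minimal usage sketch (requires gseagen_path to be set in the configuration,
# see the Config property above):
#
#     media = read_default_media_compositions()
#     seawater = media["SeaWater"]
#     print(seawater["density"], sorted(seawater["elements"]))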
@@ -22,8 +22,11 @@ import awkward1
import uproot4
from scipy.interpolate import UnivariateSpline
from scipy.spatial.transform import Rotation
import scipy.constants as constants
import mendeleev
from .jobcard import Jobcard, read_jobcard, PDGID_LOOKUP
from .config import read_default_media_compositions
EVENT_FILENAME = "FinalEvents.dat"
ROOT_PERT_FILENAME = "EventOutput.Pert.[0-9]{8}.root"
@@ -31,6 +34,8 @@ ROOT_REAL_FILENAME = "EventOutput.Real.[0-9]{8}.root"
FLUXDESCR_FILENAME = "neutrino_initialized_energyFlux.dat"
XSECTION_FILENAMES = {"all": "neutrino_absorption_cross_section_ALL.dat"}
SECONDS_PER_YEAR = 365.25 * 24 * 60 * 60
PARTICLE_COLUMNS = ["E", "Px", "Py", "Pz", "barcode"]
EVENTINFO_COLUMNS = [
"weight", "evType", "lepIn_E", "lepIn_Px", "lepIn_Py", "lepIn_Pz",
@@ -161,8 +166,34 @@ class GiBUUOutput:
xsec = np.divide(total_flux_events * weights, deltaE * n_files)
return xsec
def _w2weights(self, root_tupledata):
pass
def w2weights(self, volume, target_density, solid_angle):
"""
Calculate w2weights
Parameters
----------
volume: float [m^3]
The interaction volume
target_density: float [m^-3]
N_A * ρ
solid_angle: float
Solid angle of the possible neutrino incident direction
"""
root_tupledata = self.arrays
energy_min = np.min(self.flux_data["energy"])
energy_max = np.max(self.flux_data["energy"])
energy_phase_space = self.flux_interpolation.integral(
energy_min, energy_max)
xsec = self._event_xsec(
root_tupledata
) * self.A  # xsec_per_nucleon * number of nucleons in the target nucleus
inv_gen_flux = np.power(
self.flux_interpolation(root_tupledata.lepIn_E), -1)
phase_space = solid_angle * energy_phase_space
env_factor = volume * SECONDS_PER_YEAR
retval = env_factor * phase_space * inv_gen_flux * xsec * 10**-42 * target_density
return retval
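# Reading of the formula implemented above (a sketch, not a definitive reference):
#
#     w2_i = V * T_year * solid_angle * (integral of the generation flux over E)
#            * sigma_i * 1e-42 * n_target / flux(E_i)
#
# where flux is the generation-flux spline (self.flux_interpolation), sigma_i the
# per-nucleon cross section scaled by the mass number A, and 1e-42 is consistent
# with cross sections quoted in units of 10^-38 cm^2 (= 1e-42 m^2). A hypothetical
# call, mirroring write_detector_file() further below:
#
#     can_volume = np.pi * (can[1] - can[0]) * can[2]**2
#     w2 = gibuu_output.w2weights(can_volume, targets_per_volume, 4 * np.pi)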
@staticmethod
def bjorken_x(roottuple_data):
@@ -222,23 +253,12 @@ class GiBUUOutput:
@property
def df(self):
"""
GiBUU output data in pandas dataframe format
"""
import pandas as pd
df = None
for fname in self.root_pert_files:
with uproot4.open(join(self._data_path, fname)) as fobj:
event_data = fobj[ROOTTUPLE_KEY].arrays()
tmp_df = awkward1.to_pandas(event_data)
idx_mask = tmp_df.groupby(level=[0]).ngroup()
tmp_df["xsec"] = self._event_xsec(event_data)[idx_mask]
tmp_df["Bx"] = GiBUUOutput.bjorken_x(event_data)[idx_mask]
tmp_df["By"] = GiBUUOutput.bjorken_y(event_data)[idx_mask]
if df is None:
df = tmp_df
else:
new_indices = tmp_df.index.levels[0] + df.index.levels[0].max(
) + 1
tmp_df.index = tmp_df.index.set_levels(new_indices, level=0)
df = df.append(tmp_df)
df = awkward1.to_pandas(self.arrays)
sec_df = df[df.index.get_level_values(1) == 0]
sec_df.loc[:, "E"] = sec_df.lepOut_E
sec_df.loc[:, "Px"] = sec_df.lepOut_Px
@@ -253,6 +273,25 @@ class GiBUUOutput:
df = df.append(sec_df)
return df
@property
def arrays(self):
"""
GiBUU output data in awkward1 format
"""
retval = None
for ifile in self.root_pert_files:
fobj = uproot4.open(join(self.data_path, ifile))
if retval is None:
retval = fobj["RootTuple"].arrays()
else:
tmp = fobj["RootTuple"].arrays()
retval = np.concatenate((retval, tmp))
# Calculate additional information
retval["xsec"] = self._event_xsec(retval)
retval["Bx"] = GiBUUOutput.bjorken_x(retval)
retval["By"] = GiBUUOutput.bjorken_y(retval)
return retval
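# Usage sketch, given an existing GiBUUOutput instance `run` (its construction
# is not part of this diff):
#
#     events = run.arrays   # one awkward1 record array over all Pert ROOT files
#     print(events.lepIn_E[:5], events.xsec[:5], events.Bx[:5], events.By[:5])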
def write_detector_file(gibuu_output,
ofile="gibuu.aanet.root",
@@ -269,7 +308,8 @@ def write_detector_file(gibuu_output,
ofile: str
Output filename
can: tuple
The can dimensions which are used to distribute the events
The can dimensions which are used to distribute the events
(z_min, z_max, radius)
livetime: float
The data livetime
"""
@@ -277,17 +317,14 @@
aafile = ROOT.EventFile()
aafile.set_output(ofile)
mc_event_id = 0
mc_trk_id = 0
def add_particles(particle_data, pos_offset, rotation):
def add_particles(particle_data, pos_offset, rotation, start_mc_trk_id,
timestamp):
nonlocal aafile
nonlocal mc_event_id
nonlocal mc_trk_id
for i in range(len(particle_data.E)):
trk = ROOT.Trk()
trk.id = mc_trk_id
mc_trk_id += 1
trk.id = start_mc_trk_id + i
mom = np.array([
particle_data.Px[i], particle_data.Py[i], particle_data.Pz[i]
])
@@ -296,14 +333,16 @@ def write_detector_file(gibuu_output,
prtcl_pos = np.array([
particle_data.x[i], particle_data.y[i], particle_data.z[i]
])
prtcl_pos = rotation.apply(prtcl_pos)
trk.pos.set(*np.add(pos_offset, prtcl_pos))
trk.t = timestamp + particle_data.deltaT[i] * 1e9
except AttributeError:
trk.pos.set(*pos_offset)
trk.t = timestamp
trk.dir.set(*p_dir)
trk.mother_id = 0
trk.type = int(particle_data.barcode[i])
trk.E = particle_data.E[i]
trk.t = timestamp
aafile.evt.mc_trks.push_back(trk)
is_cc = False
@@ -322,84 +361,95 @@
nu_type *= -1
sec_lep_type *= -1
for ifile in gibuu_output.root_pert_files:
fobj = uproot4.open(join(gibuu_output.data_path, ifile))
event_data = fobj["RootTuple"].arrays()
bjorkenx = GiBUUOutput.bjorken_x(event_data)
bjorkeny = GiBUUOutput.bjorken_y(event_data)
tau_secondaries = None
if propagate_tau and abs(nu_type) == 16:
from .propagation import propagate_lepton
tau_secondaries = propagate_lepton(event_data,
np.sign(nu_type) * 15)
for i, event in enumerate(event_data):
aafile.evt.clear()
aafile.evt.id = mc_event_id
aafile.evt.mc_run_id = mc_event_id
mc_event_id += 1
# Vertex Position
r = can[2] * np.sqrt(np.random.uniform(0, 1))
phi = np.random.uniform(0, 2 * np.pi)
pos_x = r * np.cos(phi)
pos_y = r * np.sin(phi)
pos_z = np.random.uniform(can[0], can[1])
vtx_pos = np.array([pos_x, pos_y, pos_z])
# Direction
phi = np.random.uniform(0, 2 * np.pi)
cos_theta = np.random.uniform(-1, 1)
sin_theta = np.sqrt(1 - cos_theta**2)
dir_x = np.cos(phi) * sin_theta
dir_y = np.sin(phi) * sin_theta
dir_z = cos_theta
direction = np.array([dir_x, dir_y, dir_z])
theta = np.arccos(cos_theta)
R = Rotation.from_euler("yz", [theta, phi])
timestamp = np.random.uniform(0, livetime)
nu_in_trk = ROOT.Trk()
nu_in_trk.id = mc_trk_id
event_data = gibuu_output.arrays
bjorkenx = event_data.Bx
bjorkeny = event_data.By
tau_secondaries = None
if propagate_tau and abs(nu_type) == 16:
from .propagation import propagate_lepton
tau_secondaries = propagate_lepton(event_data, np.sign(nu_type) * 15)
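# The following block derives the number of target nuclei per m^3 for the
# sea-water target (reading of the factors as written below):
#   * density comes from MediaComposition.xml, presumably in g/cm^3, so
#     1e3 * density * target[1] is the element's mass density in kg/m^3
#   * target[1] is the fraction of the element with charge number
#     gibuu_output.Z (treated here as a mass fraction), target[0].atomic_weight
#     its atomic weight in g/mol
#   * 1e3 * N_A / atomic_weight then converts kilograms of that element into a
#     number of nuclei, giving targets_per_volume in m^-3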
media = read_default_media_compositions()
density = media["SeaWater"]["density"]
symbol = mendeleev.element(gibuu_output.Z).symbol
target = media["SeaWater"]["elements"][symbol]
target_density = 1e3 * density * target[1]
targets_per_volume = target_density * (1e3 * constants.Avogadro /
target[0].atomic_weight)
can_volume = np.pi * (can[1] - can[0]) * np.power(can[2], 2)
w2 = gibuu_output.w2weights(can_volume, targets_per_volume, 4 * np.pi)
for mc_event_id, event in enumerate(event_data):
aafile.evt.clear()
aafile.evt.id = mc_event_id
aafile.evt.mc_run_id = mc_event_id
# Weights
aafile.evt.w.push_back(can_volume) #w1 (can volume)
aafile.evt.w.push_back(w2[mc_event_id]) #w2
aafile.evt.w.push_back(-1.0) #w3 (= w2*flux)
# Vertex Position
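# Drawing r = R * sqrt(u) with u ~ U(0, 1) makes the radial distribution uniform
# in area over the disc, so vertices are spread uniformly inside the can volume.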
r = can[2] * np.sqrt(np.random.uniform(0, 1))
phi = np.random.uniform(0, 2 * np.pi)
pos_x = r * np.cos(phi)
pos_y = r * np.sin(phi)
pos_z = np.random.uniform(can[0], can[1])
vtx_pos = np.array([pos_x, pos_y, pos_z])
# Direction
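# Uniform phi in [0, 2*pi) together with uniform cos(theta) in [-1, 1] gives an
# isotropic direction; the same (theta, phi) pair defines the rotation R that is
# applied to the GiBUU final-state momenta further below.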
phi = np.random.uniform(0, 2 * np.pi)
cos_theta = np.random.uniform(-1, 1)
sin_theta = np.sqrt(1 - cos_theta**2)
dir_x = np.cos(phi) * sin_theta
dir_y = np.sin(phi) * sin_theta
dir_z = cos_theta
direction = np.array([dir_x, dir_y, dir_z])
theta = np.arccos(cos_theta)
R = Rotation.from_euler("yz", [theta, phi])
timestamp = np.random.uniform(0, livetime)
nu_in_trk = ROOT.Trk()
nu_in_trk.id = mc_trk_id
mc_trk_id += 1
nu_in_trk.mother_id = -1
nu_in_trk.type = nu_type
nu_in_trk.pos.set(*vtx_pos)
nu_in_trk.dir.set(*direction)
nu_in_trk.E = event.lepIn_E
nu_in_trk.t = timestamp
if not propagate_tau:
lep_out_trk = ROOT.Trk()
lep_out_trk.id = mc_trk_id
mc_trk_id += 1
nu_in_trk.mother_id = -1
nu_in_trk.type = nu_type
nu_in_trk.pos.set(*vtx_pos)
nu_in_trk.dir.set(*direction)
nu_in_trk.E = event.lepIn_E
nu_in_trk.t = timestamp
if not propagate_tau:
lep_out_trk = ROOT.Trk()
lep_out_trk.id = mc_trk_id
mc_trk_id += 1
lep_out_trk.mother_id = 0
lep_out_trk.type = sec_lep_type
lep_out_trk.pos.set(*vtx_pos)
mom = np.array(
[event.lepOut_Px, event.lepOut_Py, event.lepOut_Pz])
p_dir = R.apply(mom / np.linalg.norm(mom))
lep_out_trk.dir.set(*p_dir)
lep_out_trk.E = event.lepOut_E
lep_out_trk.t = timestamp
aafile.evt.mc_trks.push_back(lep_out_trk)
# bjorken_y = 1.0 - float(event.lepOut_E / event.lepIn_E)
nu_in_trk.setusr('bx', bjorkenx[i])
nu_in_trk.setusr('by', bjorkeny[i])
nu_in_trk.setusr('ichan', ichan)
nu_in_trk.setusr("cc", is_cc)
aafile.evt.mc_trks.push_back(nu_in_trk)
if tau_secondaries is not None:
event_tau_sec = tau_secondaries[i]
add_particles(event_tau_sec, vtx_pos, R)
add_particles(event, vtx_pos, R)
aafile.write()
lep_out_trk.mother_id = 0
lep_out_trk.type = sec_lep_type
lep_out_trk.pos.set(*vtx_pos)
mom = np.array([event.lepOut_Px, event.lepOut_Py, event.lepOut_Pz])
p_dir = R.apply(mom / np.linalg.norm(mom))
lep_out_trk.dir.set(*p_dir)
lep_out_trk.E = event.lepOut_E
lep_out_trk.t = timestamp
aafile.evt.mc_trks.push_back(lep_out_trk)
# bjorken_y = 1.0 - float(event.lepOut_E / event.lepIn_E)
nu_in_trk.setusr('bx', bjorkenx[mc_event_id])
nu_in_trk.setusr('by', bjorkeny[mc_event_id])
nu_in_trk.setusr('ichan', ichan)
nu_in_trk.setusr("cc", is_cc)
aafile.evt.mc_trks.push_back(nu_in_trk)
if tau_secondaries is not None:
event_tau_sec = tau_secondaries[mc_event_id]
add_particles(event_tau_sec, vtx_pos, R, mc_trk_id, timestamp)
mc_trk_id += len(event_tau_sec.E)
add_particles(event, vtx_pos, R, mc_trk_id, timestamp)
aafile.write()
del aafile
@@ -83,6 +83,7 @@ def propagate_lepton(lepout_data, pdgid):
for entry in lepout_data:
lepton.energy = entry.lepOut_E * 1e3
lepton.position = pp.Vector3D(0, 0, 0)
lepton.direction = pp.Vector3D(entry.lepOut_Px, entry.lepOut_Py,
entry.lepOut_Pz)
lepton.direction.normalize()
@@ -90,12 +91,13 @@ def propagate_lepton(lepout_data, pdgid):
E = np.array(secondaries.energy) / 1e3
pdgid = [p.type for p in secondaries.particles]
Px = [p.direction.x * p.momentum for p in secondaries.particles]
Py = [p.direction.y * p.momentum for p in secondaries.particles]
Pz = [p.direction.z * p.momentum for p in secondaries.particles]
x = [p.position.x for p in secondaries.particles]
y = [p.position.y for p in secondaries.particles]
z = [p.position.z for p in secondaries.particles]
Px = [p.direction.x * p.momentum / 1e3 for p in secondaries.particles]
Py = [p.direction.y * p.momentum / 1e3 for p in secondaries.particles]
Pz = [p.direction.z * p.momentum / 1e3 for p in secondaries.particles]
x = [p.position.x / 100 for p in secondaries.particles]
y = [p.position.y / 100 for p in secondaries.particles]
z = [p.position.z / 100 for p in secondaries.particles]
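# PROPOSAL works internally in MeV and cm (as suggested by the conversions in
# this function); the divisions by 1e3 and 100 above bring the secondaries back
# to the GeV and metre convention used by km3buu, matching the 1e3 scaling of
# the lepton energy handed to PROPOSAL further up.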
dt = [p.time for p in secondaries.particles]
propagated_data["E"].append(E)
propagated_data["Px"].append(Px)
@@ -105,5 +107,6 @@ def propagate_lepton(lepout_data, pdgid):
propagated_data["x"].append(x)
propagated_data["y"].append(y)
propagated_data["z"].append(z)
propagated_data["deltaT"].append(dt)
return ak.Array(propagated_data)
@@ -40,17 +40,17 @@ class TestTauPropagation(unittest.TestCase):
def test_secondary_momenta(self):
np.testing.assert_array_almost_equal(np.array(self.sec[0].E),
[0.5, 1.3, 0.3],
decimal=1)
[0.535, 1.316, 0.331],
decimal=3)
np.testing.assert_array_almost_equal(np.array(self.sec[0].Px),
[-467.4, 320.7, -245.5],
decimal=1)
[-0.467, 0.321, -0.246],
decimal=3)
np.testing.assert_array_almost_equal(np.array(self.sec[0].Py),
[127.2, -822.4, 217.5],
decimal=1)
[0.127, -0.822, 0.218],
decimal=3)
np.testing.assert_array_almost_equal(np.array(self.sec[0].Pz),
[179., 967.1, -41.1],
decimal=1)
[0.179, 0.967, -0.041],
decimal=3)
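# The expected momenta above are the previous reference values divided by 1e3
# (MeV -> GeV) and quoted to three decimals, matching the unit conversion added
# in the propagation module.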
def test_secondary_types(self):
np.testing.assert_array_equal(np.array(self.sec[0].barcode),
......