Skip to content
Snippets Groups Projects
Commit 34c1381d authored by Johannes Schumann's avatar Johannes Schumann
Browse files

Merge branch 'header_info_update' into 'master'

Header information for number of events

See merge request !35
parents 5a91019b 6d56cfdd
No related branches found
No related tags found
1 merge request!35Header information for number of events
Pipeline #31089 passed
......@@ -104,7 +104,11 @@ class NoVolume(DetectorVolume):
self._coord_origin = (.0, .0, .0)
def header_entries(self, nevents=0):
    """Return the KM3NeT header entries for this (point-like) volume.

    Parameters
    ----------
    nevents : int, default 0
        Number of generated events; written as the last field of the
        ``genvol`` header line.

    Returns
    -------
    dict
        Single-entry mapping ``{"genvol": "0 0 0 0 <nevents>"}``; the
        four leading zeros stand for the (zmin zmax r volume) fields,
        which are all zero for a geometry without extent.
    """
    # Fix: a stale ``return dict()`` left above the real body made the
    # genvol construction unreachable, so the header entry was never
    # emitted.  Build and return the dict directly.
    return {"genvol": "0 0 0 0 {}".format(nevents)}
def random_pos(self):
    """Return the fixed vertex position: always the coordinate origin."""
    origin = (0.0, 0.0, 0.0)
    return origin
......
......@@ -719,9 +719,8 @@ def write_detector_file(gibuu_output,
header_dct = EMPTY_KM3NET_HEADER_DICT.copy()
header_dct["target"] = element.name
for k, v in geometry.header_entries(gibuu_output._generated_events //
no_files).items():
header_dct[k] = v
header_dct["gibuu_Nevents"] = str(gibuu_output._generated_events)
header_dct["n_split_files"] = str(no_files)
header_dct["coord_origin"] = "{} {} {}".format(*geometry.coord_origin)
header_dct["flux"] = "{:d} 0 0".format(nu_type)
header_dct["cut_nu"] = "{:.2f} {:.2f} -1 1".format(gibuu_output.energy_min,
......@@ -749,11 +748,6 @@ def write_detector_file(gibuu_output,
tree.Branch("Evt", evt, 32000, 4)
mc_trk_id = 0
head = ROOT.Head()
for k, v in header_dct.items():
head.set_line(k, v)
head.Write("Head")
for mc_event_id, event in enumerate(event_data[start_id:stop_id]):
evt.clear()
evt.id = mc_event_id
......@@ -842,6 +836,15 @@ def write_detector_file(gibuu_output,
add_particles(event, vtx_pos, R, mc_trk_id, timestamp,
PARTICLE_MC_STATUS["TRK_ST_FINALSTATE"])
tree.Fill()
for k, v in geometry.header_entries(mc_event_id + 1).items():
header_dct[k] = v
head = ROOT.Head()
for k, v in header_dct.items():
head.set_line(k, v)
head.Write("Head")
outfile.Write()
outfile.Close()
del head
......
......@@ -90,6 +90,7 @@ class TestGiBUUOutput(unittest.TestCase):
2511.13458,
places=2)
@pytest.mark.skipif(not KM3NET_LIB_AVAILABLE,
reason="KM3NeT dataformat required")
class TestOfflineFile(unittest.TestCase):
......@@ -101,6 +102,10 @@ class TestOfflineFile(unittest.TestCase):
write_detector_file(output, datafile.name)
self.fobj = km3io.OfflineReader(datafile.name)
def test_header_event_numbers(self):
    """The written header should carry both event-count entries."""
    header = self.fobj.header
    np.testing.assert_equal(header.genvol.numberOfEvents, 4005)
    np.testing.assert_equal(header.gibuu_Nevents, 10000)
def test_numbering(self):
    """Event ids must be consecutive integers starting at zero."""
    event_ids = self.fobj.events.id
    np.testing.assert_array_equal(event_ids, range(4005))
......@@ -144,9 +149,11 @@ class TestOfflineFile(unittest.TestCase):
# GiBUU weight
np.testing.assert_almost_equal(evt.w2list[23], 0.004062418521597373)
@pytest.mark.skipif(not KM3NET_LIB_AVAILABLE,
reason="KM3NeT dataformat required")
class TestNoGeometryWriteout(unittest.TestCase):
def setUp(self):
output = GiBUUOutput(TESTDATA_DIR)
datafile = NamedTemporaryFile(suffix=".root")
......@@ -165,7 +172,6 @@ class TestNoGeometryWriteout(unittest.TestCase):
np.testing.assert_allclose(evt.mc_tracks.pos_z, 0.0)
@pytest.mark.skipif(not KM3NET_LIB_AVAILABLE,
reason="KM3NeT dataformat required")
class TestMultiFileOutput(unittest.TestCase):
......@@ -180,6 +186,14 @@ class TestMultiFileOutput(unittest.TestCase):
self.fobj2 = km3io.OfflineReader(
datafile.name.replace(".root", ".2.root"))
def test_header_event_numbers(self):
    """Both split files share the global counts; genvol differs per file."""
    # Entries identical across the two output files.
    for fobj in (self.fobj1, self.fobj2):
        np.testing.assert_equal(fobj.header.gibuu_Nevents, 10000)
        np.testing.assert_equal(fobj.header.n_split_files, 2)
    # Per-file generated-event counts.
    np.testing.assert_equal(self.fobj1.header.genvol.numberOfEvents, 2002)
    np.testing.assert_equal(self.fobj2.header.genvol.numberOfEvents, 2003)
def test_numbering(self):
    """Each split file numbers its own events from zero."""
    expected = ((self.fobj1, 2002), (self.fobj2, 2003))
    for fobj, nevts in expected:
        np.testing.assert_array_equal(fobj.events.id, range(nevts))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment