Skip to content
Snippets Groups Projects
Commit 84d5d34c authored by Rodri's avatar Rodri
Browse files

make black

parent 1508e51f
No related branches found
No related tags found
1 merge request!67Draft: Resolve "Reading summary slices and extracting the rates of each optical module"
Pipeline #25210 passed
...@@ -17,32 +17,35 @@ import os ...@@ -17,32 +17,35 @@ import os
import numpy as np import numpy as np
import h5py import h5py
def create_matrix(dom_ids, dom_rates, dom_map):
    """Arrange per-frame DOM rates into a dense 2D array.

    Parameters
    ----------
    dom_ids : sequence of sequences
        For each frame, the DOM IDs reported in that frame.
    dom_rates : sequence of sequences
        For each frame, the rates matching ``dom_ids`` element-wise.
    dom_map : dict
        Maps a DOM ID to its fixed row index in the output matrix.

    Returns
    -------
    numpy.ndarray
        Shape ``(len(dom_map), len(dom_rates))``; entry ``[dom_map[d], i]``
        holds DOM ``d``'s rate in frame ``i``. DOMs absent from a frame
        keep the initial value 0.
    """
    out = np.zeros((len(dom_map), len(dom_rates)))
    for col, (frame_rates, frame_doms) in enumerate(zip(dom_rates, dom_ids)):
        for rate, dom in zip(frame_rates, frame_doms):
            out[dom_map[dom], col] = rate
    return out
def append_to_hdf5(file, doms_rates_matrix, frame_indices, frame_times, index2dom):
    """Append a chunk of rates plus frame metadata to resizable HDF5 datasets.

    Parameters
    ----------
    file : h5py.File
        Open output file containing one resizable 1D dataset per DOM
        (named by its DOM ID) plus "frame_indices" and "frame_times".
    doms_rates_matrix : 2D array-like
        Row ``i`` holds the rates over this chunk's frames for the DOM
        ``index2dom[i]``.
    frame_indices, frame_times : 1D array-like
        Per-frame metadata for this chunk, appended alongside the rates.
    index2dom : dict
        Maps a matrix row index back to the DOM ID / dataset name.
    """
    for i, row in enumerate(doms_rates_matrix):
        dataset = file[str(index2dom[i])]
        chunk_size = len(row)
        dataset.resize(dataset.shape[0] + chunk_size, axis=0)
        dataset[-chunk_size:] = row

    frame_indices_dataset = file["frame_indices"]
    chunk_size = len(frame_indices)
    frame_indices_dataset.resize(frame_indices_dataset.shape[0] + chunk_size, axis=0)
    # BUG FIX: the original assigned into `dataset` (the last DOM dataset from
    # the loop above), silently dropping the frame indices and corrupting that
    # DOM's rates. Write into the dataset that was just resized.
    frame_indices_dataset[-chunk_size:] = frame_indices

    frame_times_dataset = file["frame_times"]
    chunk_size = len(frame_times)
    frame_times_dataset.resize(frame_times_dataset.shape[0] + chunk_size, axis=0)
    # BUG FIX: same stale-variable bug for the frame times.
    frame_times_dataset[-chunk_size:] = frame_times
def main(): def main():
arguments = docopt(__doc__) arguments = docopt(__doc__)
...@@ -56,8 +59,8 @@ def main(): ...@@ -56,8 +59,8 @@ def main():
dom2index = {} dom2index = {}
index2dom = {} index2dom = {}
for idx, dom in enumerate(detector.dom_ids): for idx, dom in enumerate(detector.dom_ids):
dom2index[dom]=idx dom2index[dom] = idx
index2dom[idx]=dom index2dom[idx] = dom
# Create output file and datasets # Create output file and datasets
h5 = h5py.File(data["output_file"], "a") h5 = h5py.File(data["output_file"], "a")
...@@ -67,18 +70,25 @@ def main(): ...@@ -67,18 +70,25 @@ def main():
h5.create_dataset("frame_indices", (0,), maxshape=(None,)) h5.create_dataset("frame_indices", (0,), maxshape=(None,))
# Read the channel rates from the summary slices, calculate the total module rates, and save them to the output file # Read the channel rates from the summary slices, calculate the total module rates, and save them to the output file
reader = km3io.online.SummarysliceReader(data["input_file"],10000) reader = km3io.online.SummarysliceReader(data["input_file"], 10000)
for ss_chunk in reader: for ss_chunk in reader:
frame_indices = ss_chunk.headers.frame_index frame_indices = ss_chunk.headers.frame_index
frame_times = ss_chunk.headers.UTC_seconds * 1e9 + ss_chunk.headers.UTC_16nanosecondcycles * 16 frame_times = (
raw_rates = [km3io.online.get_rate(getattr(ss_chunk.slices, f"ch{ch}")) for ch in range(31)] ss_chunk.headers.UTC_seconds * 1e9
+ ss_chunk.headers.UTC_16nanosecondcycles * 16
)
raw_rates = [
km3io.online.get_rate(getattr(ss_chunk.slices, f"ch{ch}"))
for ch in range(31)
]
dom_ids = ss_chunk.slices.dom_id dom_ids = ss_chunk.slices.dom_id
dom_rates = np.zeros_like(raw_rates[0]) dom_rates = np.zeros_like(raw_rates[0])
for ch in range(31): for ch in range(31):
dom_rates=np.add(dom_rates, raw_rates[ch]) dom_rates = np.add(dom_rates, raw_rates[ch])
m = create_matrix(dom_ids, dom_rates, dom2index) m = create_matrix(dom_ids, dom_rates, dom2index)
append_to_hdf5(h5, m, frame_indices, frame_times, index2dom) append_to_hdf5(h5, m, frame_indices, frame_times, index2dom)
if __name__ == "__main__": if __name__ == "__main__":
main() main()
...@@ -785,7 +785,6 @@ class TestSummarysliceReader(unittest.TestCase): ...@@ -785,7 +785,6 @@ class TestSummarysliceReader(unittest.TestCase):
assert first_frame_index == sr[0].headers[0].frame_index assert first_frame_index == sr[0].headers[0].frame_index
assert last_frame_index == sr[1].headers[0].frame_index assert last_frame_index == sr[1].headers[0].frame_index
assert last_frame_index == sr[-1].headers[0].frame_index assert last_frame_index == sr[-1].headers[0].frame_index
assert first_frame_index == sr[-2].headers[0].frame_index assert first_frame_index == sr[-2].headers[0].frame_index
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment