diff --git a/km3io/gseagen.py b/km3io/gseagen.py
index 8d73c337e697369a130496847f1e5d5e581c603b..ba2468071c7b2cadfdd9ca112cb748e9454b22cd 100644
--- a/km3io/gseagen.py
+++ b/km3io/gseagen.py
@@ -3,7 +3,7 @@
 # Filename: gseagen.py
 # Author: Johannes Schumann <jschumann@km3net.de>
 
-import uproot4 as uproot
+import uproot3 as uproot
 import numpy as np
 import warnings
 from .rootio import Branch, BranchMapper
diff --git a/km3io/offline.py b/km3io/offline.py
index fefca228a5b91357ab808b205ed5c8155077ca79..33c712d394bb9118bf5c54416e2548e43ebb7b3c 100644
--- a/km3io/offline.py
+++ b/km3io/offline.py
@@ -87,40 +87,58 @@
 class OfflineReader:
     event_path = "E/Evt"
     item_name = "OfflineEvent"
-    skip_keys = ["mc_trks", "trks", "t", "AAObject"]
+    skip_keys = ["t", "AAObject"]
     aliases = {"t_s": "t.fSec", "t_ns": "t.fNanoSec"}
-    special_keys = {
+    special_branches = {
         "hits": {
             "channel_id": "hits.channel_id",
             "dom_id": "hits.dom_id",
             "time": "hits.t",
             "tot": "hits.tot",
-            "triggered": "hits.trig",
+            "triggered": "hits.trig",  # non-zero if the hit is a triggered hit
         },
         "mc_hits": {
             "pmt_id": "mc_hits.pmt_id",
-            "time": "mc_hits.t",
-            "a": "mc_hits.a",
+            "time": "mc_hits.t",  # hit time (MC truth)
+            "a": "mc_hits.a",  # hit amplitude (in p.e.)
+            "origin": "mc_hits.origin",  # track id of the track that created this hit
+            "pure_t": "mc_hits.pure_t",  # photon time before pmt simulation
+            "pure_a": "mc_hits.pure_a",  # amplitude before pmt simulation
+            "type": "mc_hits.type",  # particle type or parametrisation used for hit
         },
         "trks": {
+            "id": "trks.id",
+            "pos_x": "trks.pos.x",
+            "pos_y": "trks.pos.y",
+            "pos_z": "trks.pos.z",
             "dir_x": "trks.dir.x",
             "dir_y": "trks.dir.y",
             "dir_z": "trks.dir.z",
+            "t": "trks.t",
+            "E": "trks.E",
+            "len": "trks.len",
+            "lik": "trks.lik",
+            "rec_type": "trks.rec_type",
             "rec_stages": "trks.rec_stages",
             "fitinf": "trks.fitinf",
         },
         "mc_trks": {
+            "id": "mc_trks.id",
+            "pos_x": "mc_trks.pos.x",
+            "pos_y": "mc_trks.pos.y",
+            "pos_z": "mc_trks.pos.z",
             "dir_x": "mc_trks.dir.x",
             "dir_y": "mc_trks.dir.y",
             "dir_z": "mc_trks.dir.z",
+            # "status": "mc_trks.status",  # TODO: check this
+            # "mother_id": "mc_trks.mother_id",  # TODO: check this
+            "type": "mc_trks.type",
+            "hit_ids": "mc_trks.hit_ids",
         },
     }
-    # TODO: this is fishy
     special_aliases = {
-        "trks": "tracks",
-        "hits": "hits",
-        "mc_hits": "mc_hits",
-        "mc_trks": "mc_tracks",
+        "tracks": "trks",
+        "mc_tracks": "mc_trks",
     }
 
     def __init__(self, file_path, step_size=2000):
@@ -142,36 +160,58 @@
         self._filename = file_path
         self._uuid = self._fobj._file.uuid
         self._iterator_index = 0
-        self._subbranches = None
+        self._keys = None
+
+        self._initialise_keys()
+
         self._event_ctor = namedtuple(
             self.item_name,
             set(
                 list(self.keys())
-                + list(self.aliases.keys())
-                + list(self.special_aliases[k] for k in self.special_keys)
+                + list(self.aliases)
+                + list(self.special_branches)
+                + list(self.special_aliases)
             ),
         )
 
+    def _initialise_keys(self):
+        toplevel_keys = set(k.split("/")[0] for k in self._fobj[self.event_path].keys())
+        keys = (toplevel_keys - set(self.skip_keys)).union(
+            list(self.aliases.keys()) + list(self.special_aliases)
+        )
+        self._keys = keys
+
     def keys(self):
-        if self._subbranches is None:
-            subbranches = defaultdict(list)
-            for key in self._fobj[self.event_path].keys():
-                toplevel, *remaining = key.split("/")
-                if remaining:
-                    subbranches[toplevel].append("/".join(remaining))
-                else:
-                    subbranches[toplevel] = []
-            for key in self.skip_keys:
-                del subbranches[key]
-            self._subbranches = subbranches
-        return self._subbranches.keys()
+        """Returns all accessible branch keys, without the skipped ones."""
+        return self._keys
 
     @property
     def events(self):
         return iter(self)
 
+    def _keyfor(self, key):
+        """Return the correct key for a given alias/key"""
+        return self.special_aliases.get(key, key)
+
+    def __getattr__(self, attr):
+        attr = self._keyfor(attr)
+        if attr in self.keys():
+            return self.__getitem__(attr)
+        raise AttributeError(
+            f"'{self.__class__.__name__}' object has no attribute '{attr}'"
+        )
+
     def __getitem__(self, key):
-        return self._fobj[self.event_path][key].array()
+        key = self._keyfor(key)
+        branch = self._fobj[self.event_path]
+        # These are special branches which are nested, like hits/trks/mc_trks
+        # We are explicitly grabbing just a predefined set of subbranches
+        # and also alias them to be backwards compatible (and attribute-accessible)
+        if key in self.special_branches:
+            return branch[key].arrays(
+                self.special_branches[key].keys(), aliases=self.special_branches[key]
+            )
+        return branch[self.aliases.get(key, key)].array()
 
     def __iter__(self):
         self._iterator_index = 0
@@ -180,28 +220,33 @@
 
     def _event_generator(self):
         events = self._fobj[self.event_path]
-        keys = list(set(self.keys()) - set(self.special_keys.keys())) + list(
-            self.aliases.keys()
-        )
+        keys = list(
+            set(self.keys())
+            - set(self.special_branches.keys())
+            - set(self.special_aliases)
+        ) + list(self.aliases.keys())
         events_it = events.iterate(keys, aliases=self.aliases, step_size=self.step_size)
         specials = []
         special_keys = (
-            self.special_keys.keys()
+            self.special_branches.keys()
         )  # dict-key ordering is an implementation detail
         for key in special_keys:
            specials.append(
                 events[key].iterate(
-                    self.special_keys[key].keys(),
-                    aliases=self.special_keys[key],
+                    self.special_branches[key].keys(),
+                    aliases=self.special_branches[key],
                     step_size=self.step_size,
                 )
             )
         for event_set, *special_sets in zip(events_it, *specials):
             for _event, *special_items in zip(event_set, *special_sets):
-                yield self._event_ctor(
+                data = {
                     **{k: _event[k] for k in keys},
-                    **{k: i for (k, i) in zip(special_keys, special_items)}
-                )
+                    **{k: i for (k, i) in zip(special_keys, special_items)},
+                }
+                for tokey, fromkey in self.special_aliases.items():
+                    data[tokey] = data[fromkey]
+                yield self._event_ctor(**data)
 
     def __next__(self):
         return next(self._events)
diff --git a/km3io/online.py b/km3io/online.py
index 13467e5ff96ed89eb397ffb3e0ef2283ba459ffc..5e6d9843b0e5209acd4fc1f3f5e878b20c57f7fa 100644
--- a/km3io/online.py
+++ b/km3io/online.py
@@ -1,6 +1,6 @@
 import binascii
 import os
-import uproot4 as uproot
+import uproot3 as uproot
 import numpy as np
 
 import numba as nb
@@ -8,7 +8,7 @@ import numba as nb
 TIMESLICE_FRAME_BASKET_CACHE_SIZE = 523 * 1024 ** 2  # [byte]
 SUMMARYSLICE_FRAME_BASKET_CACHE_SIZE = 523 * 1024 ** 2  # [byte]
 BASKET_CACHE_SIZE = 110 * 1024 ** 2
-BASKET_CACHE = uproot.cache.LRUArrayCache(BASKET_CACHE_SIZE)
+BASKET_CACHE = uproot.cache.ThreadSafeArrayCache(BASKET_CACHE_SIZE)
 
 # Parameters for PMT rate conversions, since the rates in summary slices are
 # stored as a single byte to save space. The values from 0-255 can be decoded
@@ -113,7 +113,7 @@ class OnlineReader:
         self._events = None
         self._timeslices = None
         self._summaryslices = None
-        self._uuid = self._fobj._file.uuid
+        self._uuid = binascii.hexlify(self._fobj._context.uuid).decode("ascii")
 
     @property
     def uuid(self):
@@ -214,9 +214,9 @@ class SummarySlices:
         return self._rates
 
     def _read_summaryslices(self):
-        """Reads the summary slices"""
+        """Reads a lazyarray of summary slices"""
         tree = self._fobj[b"KM3NET_SUMMARYSLICE"][b"KM3NET_SUMMARYSLICE"]
-        return tree[b"vector<KM3NETDAQ::JDAQSummaryFrame>"].array(
+        return tree[b"vector<KM3NETDAQ::JDAQSummaryFrame>"].lazyarray(
             uproot.asjagged(
                 uproot.astable(
                     uproot.asdtype(
@@ -233,15 +233,15 @@
                 ),
                 skipbytes=10,
             ),
-            basketcache=uproot.cache.LRUArrayCache(
+            basketcache=uproot.cache.ThreadSafeArrayCache(
                 SUMMARYSLICE_FRAME_BASKET_CACHE_SIZE
             ),
         )
 
     def _read_headers(self):
-        """Reads the summary slice headers"""
+        """Reads a lazyarray of summary slice headers"""
         tree = self._fobj[b"KM3NET_SUMMARYSLICE"][b"KM3NET_SUMMARYSLICE"]
-        return tree[b"KM3NETDAQ::JDAQSummarysliceHeader"].array(
+        return tree[b"KM3NETDAQ::JDAQSummarysliceHeader"].lazyarray(
             uproot.interpret(tree[b"KM3NETDAQ::JDAQSummarysliceHeader"], cntvers=True)
         )
 
@@ -277,11 +277,11 @@ class Timeslices:
             hits_dtype = np.dtype([("pmt", "u1"), ("tdc", "<u4"), ("tot", "u1")])
             hits_buffer = superframes[
                 b"vector<KM3NETDAQ::JDAQSuperFrame>.buffer"
-            ].array(
+            ].lazyarray(
                 uproot.asjagged(
                     uproot.astable(uproot.asdtype(hits_dtype)), skipbytes=6
                 ),
-                basketcache=uproot.cache.LRUArrayCache(
+                basketcache=uproot.cache.ThreadSafeArrayCache(
                     TIMESLICE_FRAME_BASKET_CACHE_SIZE
                 ),
             )
@@ -311,13 +311,13 @@
 
 class TimesliceStream:
     def __init__(self, headers, superframes, hits_buffer):
-        # self.headers = headers.array(
+        # self.headers = headers.lazyarray(
        #     uproot.asjagged(uproot.astable(
        #         uproot.asdtype(
        #             np.dtype([('a', 'i4'), ('b', 'i4'), ('c', 'i4'),
        #                       ('d', 'i4'), ('e', 'i4')]))),
        #     skipbytes=6),
-        #     basketcache=uproot.cache.LRUArrayCache(
+        #     basketcache=uproot.cache.ThreadSafeArrayCache(
        #         TIMESLICE_FRAME_BASKET_CACHE_SIZE))
         self.headers = headers
         self.superframes = superframes
@@ -325,10 +325,10 @@ class TimesliceStream:
 
     # def frames(self):
     #     n_hits = self._superframe[
-    #         b'vector<KM3NETDAQ::JDAQSuperFrame>.numberOfHits'].array(
+    #         b'vector<KM3NETDAQ::JDAQSuperFrame>.numberOfHits'].lazyarray(
    #             basketcache=BASKET_CACHE)[self._idx]
    #     module_ids = self._superframe[
-    #         b'vector<KM3NETDAQ::JDAQSuperFrame>.id'].array(basketcache=BASKET_CACHE)[self._idx]
+    #         b'vector<KM3NETDAQ::JDAQSuperFrame>.id'].lazyarray(basketcache=BASKET_CACHE)[self._idx]
    #     idx = 0
    #     for module_id, n_hits in zip(module_ids, n_hits):
    #         self._frames[module_id] = hits_buffer[idx:idx + n_hits]
@@ -358,22 +358,24 @@ class Timeslice:
         hits_buffer = self._hits_buffer[self._idx]
         n_hits = self._superframe[
             b"vector<KM3NETDAQ::JDAQSuperFrame>.numberOfHits"
-        ].array(basketcache=BASKET_CACHE)[self._idx]
+        ].lazyarray(basketcache=BASKET_CACHE)[self._idx]
         try:
             module_ids = self._superframe[
                 b"vector<KM3NETDAQ::JDAQSuperFrame>.id"
-            ].array(basketcache=BASKET_CACHE)[self._idx]
+            ].lazyarray(basketcache=BASKET_CACHE)[self._idx]
         except KeyError:
-            raise
-            # module_ids = self._superframe[
-            #     b'vector<KM3NETDAQ::JDAQSuperFrame>.KM3NETDAQ::JDAQModuleIdentifier'].array(
-            #         uproot.asjagged(
-            #             uproot.astable(uproot.asdtype([("dom_id", ">i4")]))
-            #         ),
-            #         basketcache=BASKET_CACHE,
-            #     )[self._idx]
-            #     .dom_id
-            # )
+            module_ids = (
+                self._superframe[
+                    b"vector<KM3NETDAQ::JDAQSuperFrame>.KM3NETDAQ::JDAQModuleIdentifier"
+                ]
+                .lazyarray(
+                    uproot.asjagged(
+                        uproot.astable(uproot.asdtype([("dom_id", ">i4")]))
+                    ),
+                    basketcache=BASKET_CACHE,
+                )[self._idx]
+                .dom_id
+            )
 
         idx = 0
         for module_id, n_hits in zip(module_ids, n_hits):
@@ -383,7 +385,7 @@ class Timeslice:
     def __len__(self):
         if self._n_frames is None:
             self._n_frames = len(
-                self._superframe[b"vector<KM3NETDAQ::JDAQSuperFrame>.id"].array(
+                self._superframe[b"vector<KM3NETDAQ::JDAQSuperFrame>.id"].lazyarray(
                     basketcache=BASKET_CACHE
                 )[self._idx]
             )
diff --git a/km3io/rootio.py b/km3io/rootio.py
index 921da35ae020274276e0f5776de76f7c93e87542..4668dae30a0a5a2e3d91efdd18e2f643007094d5 100644
--- a/km3io/rootio.py
+++ b/km3io/rootio.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python3
 import numpy as np
 import awkward1 as ak
-import uproot4 as uproot
+import uproot3 as uproot
 
 from .tools import unfold_indices
 
 # 110 MB based on the size of the largest basket found so far in km3net
 BASKET_CACHE_SIZE = 110 * 1024 ** 2
-BASKET_CACHE = uproot.cache.LRUArrayCache(BASKET_CACHE_SIZE)
+BASKET_CACHE = uproot.cache.ThreadSafeArrayCache(BASKET_CACHE_SIZE)
 
 
 class BranchMapper:
@@ -34,6 +34,9 @@ class BranchMapper:
         The function to be used to create attribute names. This is only
         needed if unsupported characters are present, like ``.``, which
         would prevent setting valid Python attribute names.
+    toawkward: ``None``, ``list(str)``
+        List of keys to convert to awkward arrays (recommended for
+        doubly ragged arrays)
     """
 
     def __init__(
@@ -46,6 +49,7 @@
         attrparser=None,
         flat=True,
         interpretations=None,
+        toawkward=None,
     ):
         self.name = name
         self.key = key
@@ -56,6 +60,7 @@
         self.attrparser = (lambda x: x) if attrparser is None else attrparser
         self.flat = flat
         self.interpretations = {} if interpretations is None else interpretations
+        self.toawkward = [] if toawkward is None else toawkward
 
 
 class Branch:
@@ -103,7 +108,9 @@
     def _initialise_keys(self):
         """Create the keymap and instance attributes for branch keys"""
         # TODO: this could be a cached property
-        keys = set(self._branch.keys()) - set(self._mapper.exclude)
+        keys = set(k.decode("utf-8") for k in self._branch.keys()) - set(
+            self._mapper.exclude
+        )
         self._keymap = {
             **{self._mapper.attrparser(k): k for k in keys},
             **self._mapper.extra,
@@ -130,32 +137,33 @@
 
     def __getkey__(self, key):
         interpretation = self._mapper.interpretations.get(key)
-        # if key == "usr_names":
-        #     # TODO this will be fixed soon in uproot,
-        #     # see https://github.com/scikit-hep/uproot/issues/465
-        #     import pdb; pdb.set_trace()
-        #     interpretation = uproot.asgenobj(
-        #         uproot.SimpleArray(uproot.STLVector(uproot.STLString())),
-        #         self._branch[self._keymap[key]]._context,
-        #         6,
-        #     )
-        #
-        # if key == "usr":
-        #     # triple jagged array is wrongly parsed in uproot
-        #     interpretation = uproot.asgenobj(
-        #         uproot.SimpleArray(uproot.STLVector(uproot.asdtype(">f8"))),
-        #         self._branch[self._keymap[key]]._context,
-        #         6,
-        #     )
-        #
-        out = self._branch[self._keymap[key]].array(interpretation=interpretation)
-        # if self._index_chain is not None and key in self._mapper.toawkward:
-        #     cache_key = self._mapper.name + "/" + key
-        #     if cache_key not in self._awkward_cache:
-        #         if len(out) > 20000:  # It will take more than 10 seconds
-        #             print("Creating cache for '{}'.".format(cache_key))
-        #         self._awkward_cache[cache_key] = ak.from_iter(out)
-        #     out = self._awkward_cache[cache_key]
+        if key == "usr_names":
+            # TODO this will be fixed soon in uproot,
+            # see https://github.com/scikit-hep/uproot/issues/465
+            interpretation = uproot.asgenobj(
+                uproot.SimpleArray(uproot.STLVector(uproot.STLString())),
+                self._branch[self._keymap[key]]._context,
+                6,
+            )
+
+        if key == "usr":
+            # triple jagged array is wrongly parsed in uproot
+            interpretation = uproot.asgenobj(
+                uproot.SimpleArray(uproot.STLVector(uproot.asdtype(">f8"))),
+                self._branch[self._keymap[key]]._context,
+                6,
+            )
+
+        out = self._branch[self._keymap[key]].lazyarray(
+            interpretation=interpretation, basketcache=BASKET_CACHE
+        )
+        if self._index_chain is not None and key in self._mapper.toawkward:
+            cache_key = self._mapper.name + "/" + key
+            if cache_key not in self._awkward_cache:
+                if len(out) > 20000:  # It will take more than 10 seconds
+                    print("Creating cache for '{}'.".format(cache_key))
+                self._awkward_cache[cache_key] = ak.from_iter(out)
+            out = self._awkward_cache[cache_key]
         return unfold_indices(out, self._index_chain)
 
     def __getitem__(self, item):
@@ -180,7 +188,7 @@
 
     def __len__(self):
         if not self._index_chain:
-            return self._branch.num_entries
+            return len(self._branch)
         elif isinstance(self._index_chain[-1], (int, np.int32, np.int64)):
             if len(self._index_chain) == 1:
                 try:
@@ -191,7 +199,10 @@
         else:
             return len(
                 unfold_indices(
-                    self._branch[self._keymap["id"]].array(), self._index_chain
+                    self._branch[self._keymap["id"]].lazyarray(
+                        basketcache=BASKET_CACHE
+                    ),
+                    self._index_chain,
                 )
             )
 
diff --git a/km3io/tools.py b/km3io/tools.py
index 6092b9bd3bea7e8a997caa051f1a4e6ac2f69e1b..85653c7fed5080c4693d9cb6f4418052f934d80b 100644
--- a/km3io/tools.py
+++ b/km3io/tools.py
@@ -2,7 +2,7 @@
 import numba as nb
 import numpy as np
 import awkward1 as ak1
-import uproot4 as uproot
+import uproot3 as uproot
 
 from km3io.definitions import reconstruction as krec
 from km3io.definitions import trigger as ktrg
@@ -12,7 +12,7 @@ from km3io.definitions import w2list_gseagen as kw2gsg
 
 # 110 MB based on the size of the largest basket found so far in km3net
 BASKET_CACHE_SIZE = 110 * 1024 ** 2
-BASKET_CACHE = uproot.cache.LRUArrayCache(BASKET_CACHE_SIZE)
+BASKET_CACHE = uproot.cache.ThreadSafeArrayCache(BASKET_CACHE_SIZE)
 
 
 class cached_property:
diff --git a/requirements/install.txt b/requirements/install.txt
index 7ada850b8ca0b785c3fc17514459fb5c87ef5a5b..aa82996e87d817de8569229d5b6bd031214f5d7c 100644
--- a/requirements/install.txt
+++ b/requirements/install.txt
@@ -2,4 +2,5 @@ docopt
 numba>=0.50
 awkward1>=0.3.1
 uproot4
+uproot3
 setuptools_scm
diff --git a/tests/test_gseagen.py b/tests/test_gseagen.py
index 2776d71ad1d225ff5c53500905bf7a0b2eb0ee3f..4b55e89ff63426635e6b93b6d873d361882b4834 100644
--- a/tests/test_gseagen.py
+++ b/tests/test_gseagen.py
@@ -13,7 +13,6 @@ class TestGSGHeader(unittest.TestCase):
     def setUp(self):
         self.header = GSG_READER.header
 
-    @unittest.skip
     def test_str_byte_type(self):
         assert isinstance(self.header["gSeaGenVer"], str)
         assert isinstance(self.header["GenieVer"], str)
@@ -22,7 +21,6 @@ class TestGSGHeader(unittest.TestCase):
         assert isinstance(self.header["Flux1"], str)
         assert isinstance(self.header["Flux2"], str)
 
-    @unittest.skip
     def test_values(self):
         assert self.header["RunNu"] == 1
         assert self.header["RanSeed"] == 3662074
@@ -57,7 +55,6 @@ class TestGSGHeader(unittest.TestCase):
         assert self.header["NNu"] == 2
         self.assertListEqual(self.header["NuList"].tolist(), [-14, 14])
 
-    @unittest.skip
     def test_unsupported_header(self):
         f = GSGReader(data_path("online/km3net_online.root"))
         with self.assertWarns(UserWarning):
diff --git a/tests/test_offline.py b/tests/test_offline.py
index 26f3d4184028527d22ecebb97e84596707d66445..8407705c87003d2fb5c1cff6b3e503446d026c44 100644
--- a/tests/test_offline.py
+++ b/tests/test_offline.py
@@ -158,6 +158,8 @@ class TestOfflineEvents(unittest.TestCase):
     def test_attributes(self):
         assert self.n_events == len(self.events.det_id)
         self.assertListEqual(self.det_id, list(self.events.det_id))
+        print(self.n_hits)
+        print(self.events.hits)
         self.assertListEqual(self.n_hits, len(self.events.hits))
         self.assertListEqual(self.n_tracks, len(self.events.tracks))
         self.assertListEqual(self.t_sec, list(self.events.t_sec))
diff --git a/tests/test_online.py b/tests/test_online.py
index 1f9d942e24e5f81091e892dc7af8411a96b128d3..50e5689aab4b48dd8ee33b57ddaa027e058ebf20 100644
--- a/tests/test_online.py
+++ b/tests/test_online.py
@@ -26,7 +26,6 @@ class TestOnlineReaderContextManager(unittest.TestCase):
 
 
 class TestUUID(unittest.TestCase):
-    @unittest.skip
     def test_uuid(self):
         assert OnlineReader(ONLINE_FILE).uuid == "00010c85603008c611ea971772f09e86beef"
 
@@ -35,15 +34,12 @@ class TestOnlineEvents(unittest.TestCase):
     def setUp(self):
         self.events = OnlineReader(ONLINE_FILE).events
 
-    @unittest.skip
     def test_index_lookup(self):
         assert 3 == len(self.events)
 
-    @unittest.skip
     def test_str(self):
         assert re.match(".*events.*3", str(self.events))
 
-    @unittest.skip
     def test_repr(self):
         assert re.match(".*events.*3", self.events.__repr__())
 
@@ -52,11 +48,9 @@ class TestOnlineEvent(unittest.TestCase):
     def setUp(self):
         self.event = OnlineReader(ONLINE_FILE).events[0]
 
-    @unittest.skip
     def test_str(self):
         assert re.match(".*event.*96.*snapshot.*18.*triggered", str(self.event))
 
-    @unittest.skip
     def test_repr(self):
         assert re.match(".*event.*96.*snapshot.*18.*triggered", self.event.__repr__())
 
@@ -67,7 +61,6 @@ class TestOnlineEventsSnapshotHits(unittest.TestCase):
         self.lengths = {0: 96, 1: 124, -1: 78}
         self.total_item_count = 298
 
-    @unittest.skip
     def test_reading_snapshot_hits(self):
         hits = self.events.snapshot_hits
 
@@ -76,7 +69,6 @@ class TestOnlineEventsSnapshotHits(unittest.TestCase):
             assert length == len(hits[event_id].channel_id)
             assert length == len(hits[event_id].time)
 
-    @unittest.skip
     def test_total_item_counts(self):
         hits = self.events.snapshot_hits
 
@@ -84,7 +76,6 @@ class TestOnlineEventsSnapshotHits(unittest.TestCase):
         assert self.total_item_count == sum(hits.channel_id.count())
         assert self.total_item_count == sum(hits.time.count())
 
-    @unittest.skip
     def test_data_values(self):
         hits = self.events.snapshot_hits
 
@@ -94,7 +85,6 @@ class TestOnlineEventsSnapshotHits(unittest.TestCase):
         self.assertListEqual([10, 13, 0], list(hits.channel_id[0][:3]))
         self.assertListEqual([30733918, 30733916, 30733256], list(hits.time[0][:3]))
 
-    @unittest.skip
     def test_channel_ids_have_valid_values(self):
         hits = self.events.snapshot_hits
 
@@ -109,7 +99,6 @@ class TestOnlineEventsTriggeredHits(unittest.TestCase):
         self.lengths = {0: 18, 1: 53, -1: 9}
         self.total_item_count = 80
 
-    @unittest.skip
     def test_data_lengths(self):
         hits = self.events.triggered_hits
 
@@ -119,7 +108,6 @@ class TestOnlineEventsTriggeredHits(unittest.TestCase):
             assert length == len(hits[event_id].time)
             assert length == len(hits[event_id].trigger_mask)
 
-    @unittest.skip
     def test_total_item_counts(self):
         hits = self.events.triggered_hits
 
@@ -127,7 +115,6 @@ class TestOnlineEventsTriggeredHits(unittest.TestCase):
         assert self.total_item_count == sum(hits.channel_id.count())
         assert self.total_item_count == sum(hits.time.count())
 
-    @unittest.skip
     def test_data_values(self):
         hits = self.events.triggered_hits
 
@@ -138,7 +125,6 @@ class TestOnlineEventsTriggeredHits(unittest.TestCase):
         self.assertListEqual([30733918, 30733916, 30733429], list(hits.time[0][:3]))
         self.assertListEqual([16, 16, 4], list(hits.trigger_mask[0][:3]))
 
-    @unittest.skip
     def test_channel_ids_have_valid_values(self):
         hits = self.events.triggered_hits
 
@@ -151,7 +137,6 @@ class TestTimeslices(unittest.TestCase):
     def setUp(self):
         self.ts = OnlineReader(ONLINE_FILE).timeslices
 
-    @unittest.skip
     def test_data_lengths(self):
         assert 3 == len(self.ts._timeslices["L1"][0])
         assert 3 == len(self.ts._timeslices["SN"][0])
@@ -160,16 +145,13 @@
         with self.assertRaises(KeyError):
             assert 0 == len(self.ts._timeslices["L0"][0])
 
-    @unittest.skip
     def test_streams(self):
         self.ts.stream("L1", 0)
         self.ts.stream("SN", 0)
 
-    @unittest.skip
     def test_reading_frames(self):
         assert 8 == len(self.ts.stream("SN", 1).frames[808447186])
 
-    @unittest.skip
     def test_str(self):
         s = str(self.ts)
         assert "L1" in s
@@ -181,7 +163,6 @@ class TestTimeslice(unittest.TestCase):
         self.ts = OnlineReader(ONLINE_FILE).timeslices
         self.n_frames = {"L1": [69, 69, 69], "SN": [64, 66, 68]}
 
-    @unittest.skip
     def test_str(self):
         for stream, n_frames in self.n_frames.items():
             print(stream, n_frames)
@@ -194,7 +175,6 @@ class TestSummaryslices(unittest.TestCase):
     def setUp(self):
         self.ss = OnlineReader(ONLINE_FILE).summaryslices
 
-    @unittest.skip
     def test_headers(self):
         assert 3 == len(self.ss.headers)
         self.assertListEqual([44, 44, 44], list(self.ss.headers.detector_id))
@@ -202,15 +182,12 @@ class TestSummaryslices(unittest.TestCase):
         self.assertListEqual([126, 127, 128], list(self.ss.headers.frame_index))
         assert 806451572 == self.ss.slices[0].dom_id[0]
 
-    @unittest.skip
     def test_slices(self):
         assert 3 == len(self.ss.slices)
 
-    @unittest.skip
     def test_rates(self):
         assert 3 == len(self.ss.rates)
 
-    @unittest.skip
     def test_fifo(self):
         s = self.ss.slices[0]
         dct_fifo_stat = {
@@ -223,7 +200,6 @@ class TestSummaryslices(unittest.TestCase):
             frame = s[s.dom_id == dom_id]
             assert any(get_channel_flags(frame.fifo[0])) == fifo_status
 
-    @unittest.skip
     def test_has_udp_trailer(self):
         s = self.ss.slices[0]
         dct_udp_trailer = {
@@ -243,7 +219,6 @@ class TestSummaryslices(unittest.TestCase):
             frame = s[s.dom_id == dom_id]
             assert has_udp_trailer(frame.fifo[0]) == udp_trailer
 
-    @unittest.skip
     def test_high_rate_veto(self):
         s = self.ss.slices[0]
         dct_high_rate_veto = {
@@ -284,7 +259,6 @@ class TestSummaryslices(unittest.TestCase):
             frame = s[s.dom_id == dom_id]
             assert any(get_channel_flags(frame.hrv[0])) == high_rate_veto
 
-    @unittest.skip
     def test_max_sequence_number(self):
         s = self.ss.slices[0]
         dct_seq_numbers = {
@@ -323,7 +297,6 @@ class TestSummaryslices(unittest.TestCase):
                 get_udp_max_sequence_number(frame.dq_status[0]) == max_sequence_number
             )
 
-    @unittest.skip
     def test_number_udp_packets(self):
         s = self.ss.slices[0]
         dct_n_packets = {
@@ -353,7 +326,6 @@ class TestSummaryslices(unittest.TestCase):
             frame = s[s.dom_id == dom_id]
             assert get_number_udp_packets(frame.dq_status[0]) == n_udp_packets
 
-    @unittest.skip
     def test_hrv_flags(self):
         s = self.ss.slices[0]
         dct_hrv_flags = {
@@ -496,7 +468,6 @@ class TestSummaryslices(unittest.TestCase):
                 [a == b for a, b in zip(get_channel_flags(frame.hrv[0]), hrv_flags)]
             )
 
-    @unittest.skip
     def test_fifo_flags(self):
         s = self.ss.slices[0]
         dct_fifo_flags = {
@@ -705,13 +676,11 @@ class TestSummaryslices(unittest.TestCase):
                 [a == b for a, b in zip(get_channel_flags(frame.fifo[0]), fifo_flags)]
             )
 
-    @unittest.skip
     def test_str(self):
         print(str(self.ss))
 
 
 class TestGetChannelFlags_Issue59(unittest.TestCase):
-    @unittest.skip
     def test_sample_summaryslice_dump(self):
         fieldnames = ["dom_id"]
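Note on the special_aliases mapping in km3io/offline.py: it now maps the user-facing name to the underlying ROOT branch name ("tracks" -> "trks"), and OfflineReader._keyfor resolves aliases before every lookup in __getattr__ and __getitem__. A minimal, self-contained sketch of that resolution logic; the free function keyfor below is a hypothetical stand-in for the bound method, not the km3io API itself:

    special_aliases = {"tracks": "trks", "mc_tracks": "mc_trks"}

    def keyfor(key):
        # Resolve a user-facing alias to the underlying ROOT branch name;
        # non-aliased keys pass through unchanged.
        return special_aliases.get(key, key)

    assert keyfor("tracks") == "trks"  # alias resolves to the real branch
    assert keyfor("hits") == "hits"    # plain keys are returned as-is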
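The OnlineReader UUID is now built from the raw bytes that uproot3 exposes on the file context (self._fobj._context.uuid), hexlified into the string asserted in tests/test_online.py. A toy illustration of that conversion; the byte literal is illustrative and matches only the first four bytes of the expected test value:

    import binascii

    raw = b"\x00\x01\x0c\x85"  # illustrative leading UUID bytes
    assert binascii.hexlify(raw).decode("ascii") == "00010c85"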
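All of the .array(...) to .lazyarray(...) conversions in online.py and rootio.py follow one pattern under uproot3: reading is deferred until elements are actually accessed, and decompressed baskets are shared through a thread-safe cache instead of being re-read from disk. A condensed sketch of the pattern, assuming uproot3 is installed; the file, tree and branch names are hypothetical:

    import uproot3 as uproot

    # 110 MB, sized to the largest basket observed so far in KM3NeT files
    BASKET_CACHE_SIZE = 110 * 1024 ** 2
    BASKET_CACHE = uproot.cache.ThreadSafeArrayCache(BASKET_CACHE_SIZE)

    f = uproot.open("some_file.root")       # hypothetical file
    branch = f["some_tree"]["some_branch"]  # hypothetical tree/branch
    values = branch.lazyarray(basketcache=BASKET_CACHE)
    print(values[:10])  # baskets are read and decompressed only at this point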