Commit 186053d8 authored by Tamas Gal

Merge branch 'master' into 'add-fitinf'

# Conflicts:
#   km3io/offline.py
#   tests/test_offline.py
parents 523e1d8e bd72ae76
1 merge request: !8 outsource reconstruction data in separate arrays
Pipeline #8218 passed with warnings
This commit is part of merge request !8.
......@@ -4,6 +4,18 @@ Unreleased changes
Version 0
---------
0.8.0 / 2020-01-23
~~~~~~~~~~~~~~~~~~
* Offline file headers are now accessible
0.7.0 / 2020-01-23
~~~~~~~~~~~~~~~~~~
* Reading of summary slice status information is now supported
0.6.3 / 2020-01-09
~~~~~~~~~~~~~~~~~~
* Bugfixes
0.6.2 / 2019-12-22
~~~~~~~~~~~~~~~~~~
* Fixes slicing of ``OfflineTracks``
......
The km3io Python package
========================
.. image:: https://git.km3net.de/km3py/km3io/badges/master/build.svg
.. image:: https://git.km3net.de/km3py/km3io/badges/master/pipeline.svg
:target: https://git.km3net.de/km3py/km3io/pipelines
.. image:: https://git.km3net.de/km3py/km3io/badges/master/coverage.svg
......@@ -140,12 +140,59 @@ First, let's read our file:
.. code-block:: python3
>>> import km3io as ki
>>> file = 'datav6.0test.jchain.aanet.00005971.root'
>>> file = 'my_file.root'
>>> r = ki.OfflineReader(file)
<km3io.aanet.OfflineReader at 0x7f24cc2bd550>
<km3io.offline.OfflineReader at 0x7f24cc2bd550>
and that's it! Note that `file` can be either a str containing your file path or a path-like object.
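For instance, a `pathlib.Path` works just as well as a plain string (a minimal sketch, reusing the `file` variable from above):
.. code-block:: python3
>>> from pathlib import Path
>>> r = ki.OfflineReader(Path(file))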
To read the file header:
.. code-block:: python3
>>> r.header
DAQ 394
PDF 4 58
XSecFile
can 0 1027 888.4
can_user 0.00 1027.00 888.40
coord_origin 0 0 0
cut_in 0 0 0 0
cut_nu 100 1e+08 -1 1
cut_primary 0 0 0 0
cut_seamuon 0 0 0 0
decay doesnt happen
detector NOT
drawing Volume
end_event
genhencut 2000 0
genvol 0 1027 888.4 2.649e+09 100000
kcut 2
livetime 0 0
model 1 2 0 1 12
muon_desc_file
ngen 0.1000E+06
norma 0 0
nuflux 0 3 0 0.500E+00 0.000E+00 0.100E+01 0.300E+01
physics GENHEN 7.2-220514 181116 1138
seed GENHEN 3 305765867 0 0
simul JSirene 11012 11/17/18 07
sourcemode diffuse
spectrum -1.4
start_run 1
target isoscalar
usedetfile false
xlat_user 0.63297
xparam OFF
zed_user 0.00 3450.00
**Note:** not all file header formats are supported, so don't be surprised if you get the following warning:
.. code-block:: python3
/home/zineb/km3net/km3net/km3io/km3io/offline.py:341: UserWarning: Your file header has an unsupported format
warnings.warn("Your file header has an unsupported format")
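Since the header is exposed as a plain mapping of strings, individual fields can be looked up by name and converted as needed; a minimal sketch, reusing the values shown above:
.. code-block:: python3
>>> float(r.header['DAQ'])
394.0
>>> float(r.header['kcut'])
2.0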
To explore all the available branches in our offline file:
.. code-block:: python3
......
......@@ -26,7 +26,7 @@ release = get_distribution('km3io').version
version = '.'.join(release.split('.')[:2])
project = 'km3io {}'.format(km3io.__version__)
copyright = '{0}, Zineb Aly and Tamas Gal'.format(date.today().year)
author = 'Zineb Aly, Tamas Gal'
author = 'Zineb Aly, Tamas Gal, Johannes Schumann'
# -- General configuration ---------------------------------------------------
......
......@@ -2,12 +2,19 @@ import uproot
import numpy as np
import numba as nb
TIMESLICE_FRAME_BASKET_CACHE_SIZE = 23 * 1024**2 # [byte]
TIMESLICE_FRAME_BASKET_CACHE_SIZE = 523 * 1024**2 # [byte]
SUMMARYSLICE_FRAME_BASKET_CACHE_SIZE = 523 * 1024**2 # [byte]
# Parameters for PMT rate conversions, since the rates in summary slices are
# stored as a single byte to save space. The values from 0-255 can be decoded
# using the `get_rate(value)` function, which will yield the actual rate
# in Hz.
MINIMAL_RATE_HZ = 2.0e3
MAXIMAL_RATE_HZ = 2.0e6
RATE_FACTOR = np.log(MAXIMAL_RATE_HZ / MINIMAL_RATE_HZ) / 255
CHANNEL_BITS_TEMPLATE = np.zeros(31, dtype=bool)
@nb.vectorize([
nb.int32(nb.int8),
......@@ -23,10 +30,82 @@ def get_rate(value):
return MINIMAL_RATE_HZ * np.exp(value * RATE_FACTOR)
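# A hedged usage sketch (the expected values mirror the unit tests at the
# bottom of this diff): a stored byte of 0 means "no rate", while 1..255 map
# exponentially between MINIMAL_RATE_HZ and MAXIMAL_RATE_HZ.
#
#     get_rate(0)                # -> 0
#     get_rate(1)                # -> 2054  (Hz)
#     list(get_rate([1, 2, 3]))  # -> [2054, 2111, 2169]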
@nb.guvectorize(
"void(i8, b1[:], b1[:])", "(), (n) -> (n)", target="parallel", nopython=True
)
def unpack_bits(value, bits_template, out):
"""Return a boolean array for a value's bit representation.
This function also accepts arrays as input, the output shape will be
NxM where N is the number of input values and M the length of the
``bits_template`` array, which is just a dummy array, due to the weird
signature system of numba.
Parameters
----------
value: int or np.array(int) with shape (N,)
The binary value of containing the bit information
bits_template: np.array() with shape (M,)
The template for the output array, the only important is its shape
Returns
-------
np.array(bool) either with shape (M,) or (N, M)
"""
for i in range(bits_template.shape[0]):
out[30 - i] = value & (1 << i) > 0
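# A hedged usage sketch (not part of the module): unpacking the value 5
# (0b101) against the 31-entry ``CHANNEL_BITS_TEMPLATE`` yields the bits in
# MSB-first order, i.e. bit 0 of the input ends up at index 30 of the output.
#
#     bits = unpack_bits(5, CHANNEL_BITS_TEMPLATE)
#     bits.shape       # -> (31,)
#     list(bits[-3:])  # -> [True, False, True]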
def get_channel_flags(value):
"""Returns the hrv/fifo flags for the PMT channels (hrv/fifo)
Parameters
----------
value : int32
The integer value to be parsed.
"""
channel_bits = np.bitwise_and(value, 0x3FFFFFFF)
flags = unpack_bits(channel_bits, CHANNEL_BITS_TEMPLATE)
return np.flip(flags, axis=-1)
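# A hedged usage sketch: after masking the channel bits and flipping the
# unpacked array, entry ``i`` of the returned boolean array corresponds to
# PMT channel ``i``. The tests at the bottom of this diff use it as
#
#     hrv_flags  = get_channel_flags(frame.hrv[0])
#     fifo_flags = get_channel_flags(frame.fifo[0])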
def get_number_udp_packets(value):
"""Returns the number of received UDP packets (dq_status)
Parameters
----------
value : int32
The integer value to be parsed.
"""
return np.bitwise_and(value, 0x7FFF)
def get_udp_max_sequence_number(value):
"""Returns the maximum sequence number of the received UDP packets (dq_status)
Parameters
----------
value : int32
The integer value to be parsed.
"""
return np.right_shift(value, 16)
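# A hedged usage sketch for the two ``dq_status`` helpers above: the lower
# bits of the status word carry the number of received UDP packets and the
# upper 16 bits the maximum UDP sequence number, as exercised in the tests
# at the bottom of this diff:
#
#     n_packets = get_number_udp_packets(frame.dq_status[0])
#     max_seq   = get_udp_max_sequence_number(frame.dq_status[0])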
def has_udp_trailer(value):
"""Returns the UDP Trailer flag (fifo)
Parameters
----------
value : int32
The integer value to be parsed.
"""
return np.any(np.bitwise_and(value, np.left_shift(1, 31)))
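# A hedged usage sketch: the UDP trailer flag is stored in bit 31 of the
# ``fifo`` word, so a typical call (as in ``test_has_udp_trailer`` further
# down in this diff) looks like
#
#     has_udp_trailer(frame.fifo[0])   # -> True/False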
class DAQReader:
"""Reader for DAQ ROOT files"""
def __init__(self, filename):
self.fobj = uproot.open(filename)
self._fobj = uproot.open(filename)
self._events = None
self._timeslices = None
self._summaryslices = None
......@@ -34,7 +113,7 @@ class DAQReader:
@property
def events(self):
if self._events is None:
tree = self.fobj["KM3NET_EVENT"]
tree = self._fobj["KM3NET_EVENT"]
headers = tree["KM3NETDAQ::JDAQEventHeader"].array(
uproot.interpret(tree["KM3NETDAQ::JDAQEventHeader"],
......@@ -57,20 +136,20 @@ class DAQReader:
@property
def timeslices(self):
if self._timeslices is None:
self._timeslices = DAQTimeslices(self.fobj)
self._timeslices = DAQTimeslices(self._fobj)
return self._timeslices
@property
def summaryslices(self):
if self._summaryslices is None:
self._summaryslices = SummmarySlices(self.fobj)
self._summaryslices = SummmarySlices(self._fobj)
return self._summaryslices
class SummmarySlices:
"""A wrapper for summary slices"""
def __init__(self, fobj):
self.fobj = fobj
self._fobj = fobj
self._slices = None
self._headers = None
self._rates = None
......@@ -96,7 +175,7 @@ class SummmarySlices:
def _read_summaryslices(self):
"""Reads a lazyarray of summary slices"""
tree = self.fobj[b'KM3NET_SUMMARYSLICE'][b'KM3NET_SUMMARYSLICE']
tree = self._fobj[b'KM3NET_SUMMARYSLICE'][b'KM3NET_SUMMARYSLICE']
return tree[b'vector<KM3NETDAQ::JDAQSummaryFrame>'].lazyarray(
uproot.asjagged(uproot.astable(
uproot.asdtype([("dom_id", "i4"), ("dq_status", "u4"),
......@@ -109,7 +188,7 @@ class SummmarySlices:
def _read_headers(self):
"""Reads a lazyarray of summary slice headers"""
tree = self.fobj[b'KM3NET_SUMMARYSLICE'][b'KM3NET_SUMMARYSLICE']
tree = self._fobj[b'KM3NET_SUMMARYSLICE'][b'KM3NET_SUMMARYSLICE']
return tree[b'KM3NETDAQ::JDAQSummarysliceHeader'].lazyarray(
uproot.interpret(tree[b'KM3NETDAQ::JDAQSummarysliceHeader'],
cntvers=True))
......@@ -118,42 +197,35 @@ class SummmarySlices:
class DAQTimeslices:
"""A simple wrapper for DAQ timeslices"""
def __init__(self, fobj):
self.fobj = fobj
self._fobj = fobj
self._timeslices = {}
self._read_default_stream()
self._read_streams()
def _read_default_stream(self):
"""Read the default KM3NET_TIMESLICE stream"""
tree = self.fobj[b'KM3NET_TIMESLICE'][b'KM3NET_TIMESLICE']
headers = tree[b'KM3NETDAQ::JDAQTimesliceHeader']
superframes = tree[b'vector<KM3NETDAQ::JDAQSuperFrame>']
self._timeslices['default'] = (headers, superframes)
def _read_streams(self):
"""Read the L0, L1, L2 and SN streams if available"""
streams = [
streams = set(
s.split(b"KM3NET_TIMESLICE_")[1].split(b';')[0]
for s in self.fobj.keys() if b"KM3NET_TIMESLICE_" in s
]
for s in self._fobj.keys() if b"KM3NET_TIMESLICE_" in s)
for stream in streams:
tree = self.fobj[b'KM3NET_TIMESLICE_' +
stream][b'KM3NETDAQ::JDAQTimeslice']
tree = self._fobj[b'KM3NET_TIMESLICE_' +
stream][b'KM3NETDAQ::JDAQTimeslice']
headers = tree[b'KM3NETDAQ::JDAQTimesliceHeader'][
b'KM3NETDAQ::JDAQHeader'][b'KM3NETDAQ::JDAQChronometer']
if len(headers) == 0:
continue
superframes = tree[b'vector<KM3NETDAQ::JDAQSuperFrame>']
hits_dtype = np.dtype([("pmt", "u1"), ("tdc", "<u4"),
("tot", "u1")])
hits_buffer = superframes[
b'vector<KM3NETDAQ::JDAQSuperFrame>.buffer'].lazyarray(
uproot.asjagged(uproot.astable(
uproot.asdtype([("pmt", "u1"), ("tdc", "u4"),
("tot", "u1")])),
uproot.asjagged(uproot.astable(uproot.asdtype(hits_dtype)),
skipbytes=6),
basketcache=uproot.cache.ThreadSafeArrayCache(
TIMESLICE_FRAME_BASKET_CACHE_SIZE))
self._timeslices[stream.decode("ascii")] = (headers, superframes,
hits_buffer)
setattr(self, stream.decode("ascii"),
DAQTimesliceStream(headers, superframes, hits_buffer))
def stream(self, stream, idx):
ts = self._timeslices[stream]
......@@ -167,6 +239,21 @@ class DAQTimeslices:
return str(self)
class DAQTimesliceStream:
def __init__(self, headers, superframes, hits_buffer):
# self.headers = headers.lazyarray(
# uproot.asjagged(uproot.astable(
# uproot.asdtype(
# np.dtype([('a', 'i4'), ('b', 'i4'), ('c', 'i4'),
# ('d', 'i4'), ('e', 'i4')]))),
# skipbytes=6),
# basketcache=uproot.cache.ThreadSafeArrayCache(
# TIMESLICE_FRAME_BASKET_CACHE_SIZE))
self.headers = headers
self.superframes = superframes
self._hits_buffer = hits_buffer
class DAQTimeslice:
"""A wrapper for a DAQ timeslice"""
def __init__(self, header, superframe, hits_buffer, idx, stream):
......
# -*- coding: utf-8 -*-
"""
KM3NeT Data Definitions v1.1.2
https://git.km3net.de/common/km3net-dataformat
"""
# fitparameters
data = {
"JGANDALF_BETA0_RAD": 0,
"JGANDALF_BETA1_RAD": 1,
"JGANDALF_CHI2": 2,
"JGANDALF_NUMBER_OF_HITS": 3,
"JENERGY_ENERGY": 4,
"JENERGY_CHI2": 5,
"JGANDALF_LAMBDA": 6,
"JGANDALF_NUMBER_OF_ITERATIONS": 7,
"JSTART_NPE_MIP": 8,
"JSTART_NPE_MIP_TOTAL": 9,
"JSTART_LENGTH_METRES": 10,
"JVETO_NPE": 11,
"JVETO_NUMBER_OF_HITS": 12,
"JENERGY_MUON_RANGE_METRES": 13,
"JENERGY_NOISE_LIKELIHOOD": 14,
"JENERGY_NDF": 15,
"JENERGY_NUMBER_OF_HITS": 16,
"JCOPY_Z_M": 17,
}
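# A hedged usage sketch (the attribute access below is hypothetical): the
# mapping translates fit-parameter names into positions inside a track's
# ``fitinf`` vector, e.g.
#
#     idx = data["JENERGY_ENERGY"]   # -> 4
#     # energy = track.fitinf[idx]   # hypothetical access pattern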
# -*- coding: utf-8 -*-
"""
KM3NeT Data Definitions v1.1.2
https://git.km3net.de/common/km3net-dataformat
"""
# reconstruction
data = {
"JPP_RECONSTRUCTION_TYPE": 4000,
"JMUONFIT": 0,
"JMUONBEGIN": 0,
"JMUONPREFIT": 1,
"JMUONSIMPLEX": 2,
"JMUONGANDALF": 3,
"JMUONENERGY": 4,
"JMUONSTART": 5,
"JLINEFIT": 6,
"JMUONEND": 99,
"JSHOWERFIT": 100,
"JSHOWERBEGIN": 100,
"JSHOWERPREFIT": 101,
"JSHOWERPOSITIONFIT": 102,
"JSHOWERCOMPLETEFIT": 103,
"JSHOWER_BJORKEN_Y": 104,
"JSHOWEREND": 199,
"DUSJSHOWERFIT": 200,
"DUSJBEGIN": 200,
"DUSJPREFIT": 201,
"DUSJPOSITIONFIT": 202,
"JDUSJCOMPLETEFIT": 203,
"DUSJEND": 299,
"AASHOWERFIT": 300,
"AASHOWERBEGIN": 300,
"AASHOWERCOMPLETEFIT": 301,
"AASHOWEREND": 399,
"JUSERBEGIN": 1000,
"JMUONVETO": 1001,
"JMUONPATH": 1003,
"JMCEVT": 1004,
"JUSEREND": 1099,
"RECTYPE_UNKNOWN": -1,
"RECSTAGE_UNKNOWN": -1,
}
# -*- coding: utf-8 -*-
"""
KM3NeT Data Definitions v1.1.2
https://git.km3net.de/common/km3net-dataformat
"""
# trigger
data = {
"JTRIGGER3DSHOWER": 1,
"JTRIGGERMXSHOWER": 2,
"JTRIGGER3DMUON": 4,
"JTRIGGERNB": 5,
}
# -*- coding: utf-8 -*-
"""
KM3NeT Data Definitions v1.0.1
https://git.km3net.de/common/data-format
"""
# fitparameters
JGANDALF_BETA0_RAD = 0
JGANDALF_BETA1_RAD = 1
JGANDALF_CHI2 = 2
JGANDALF_NUMBER_OF_HITS = 3
JENERGY_ENERGY = 4
JENERGY_CHI2 = 5
JGANDALF_LAMBDA = 6
JGANDALF_NUMBER_OF_ITERATIONS = 7
JSTART_NPE_MIP = 8
JSTART_NPE_MIP_TOTAL = 9
JSTART_LENGTH_METRES = 10
JVETO_NPE = 11
JVETO_NUMBER_OF_HITS = 12
JENERGY_MUON_RANGE_METRES = 13
JENERGY_NOISE_LIKELIHOOD = 14
JENERGY_NDF = 15
JENERGY_NUMBER_OF_HITS = 16
JCOPY_Z_M = 17
import uproot
import numpy as np
import warnings
# 110 MB based on the size of the largest basket found so far in km3net
BASKET_CACHE_SIZE = 110 * 1024**2
......@@ -320,6 +321,7 @@ class OfflineReader:
self._mc_tracks = None
self._keys = None
self._best_reco = None
self._header = None
def __getitem__(self, item):
return OfflineReader(file_path=self._file_path, data=self._data[item])
......@@ -327,6 +329,20 @@ class OfflineReader:
def __len__(self):
return len(self._data)
@property
def header(self):
if self._header is None:
fobj = uproot.open(self._file_path)
if b'Head;1' in fobj.keys():
self._header = {}
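# the mangled attribute name below is how uproot exposes the ROOT
# "map<string,string>" member of the "Head" object
# ('<' becomes '_3c_', ',' becomes '_2c_', '>' becomes '_3e_')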
for n, x in fobj['Head']._map_3c_string_2c_string_3e_.items():
print("{:15s} {}".format(n.decode("utf-8"), x.decode("utf-8")))
self._header[n.decode("utf-8")] = x.decode("utf-8")
if b'Header;1' in fobj.keys():
warnings.warn("Your file header has an unsupported format")
return self._header
@property
def keys(self):
"""wrapper for all keys in an offline file.
......
# -*- coding: utf-8 -*-
"""
KM3NeT Data Definitions v1.0.1
https://git.km3net.de/common/data-format
"""
# reconstruction
JMUONFIT = 0
JMUONBEGIN = 0
JMUONPREFIT = 1
JMUONSIMPLEX = 2
JMUONGANDALF = 3
JMUONENERGY = 4
JMUONSTART = 5
JLINEFIT = 6
JMUONEND = 99
JSHOWERFIT = 100
JSHOWERBEGIN = 100
JSHOWERPREFIT = 101
JSHOWERPOSITIONFIT = 102
JSHOWERCOMPLETEFIT = 103
JSHOWER_BJORKEN_Y = 104
JSHOWEREND = 199
DUSJSHOWERFIT = 200
DUSJBEGIN = 200
DUSJPREFIT = 201
DUSJPOSITIONFIT = 202
JDUSJCOMPLETEFIT = 203
DUSJEND = 299
AASHOWERFIT = 300
AASHOWERBEGIN = 300
AASHOWERCOMPLETEFIT = 301
AASHOWEREND = 399
JUSERBEGIN = 1000
JMUONVETO = 1001
JMUONPATH = 1003
JMCEVT = 1004
JUSEREND = 1099
RECTYPE_UNKNOWN = -1
RECSTAGE_UNKNOWN = -1
# -*- coding: utf-8 -*-
"""
KM3NeT Data Definitions v1.0.1
https://git.km3net.de/common/data-format
"""
# trigger
JTRIGGER3DSHOWER = 1
JTRIGGERMXSHOWER = 2
JTRIGGER3DMUON = 4
JTRIGGERNB = 5
......@@ -20,8 +20,8 @@ setup(
url='http://git.km3net.de/km3py/km3io',
description='KM3NeT I/O without ROOT',
long_description=long_description,
author='Zineb Aly, Tamas Gal',
author_email='zaly@km3net.de, tgal@km3net.de',
author='Zineb Aly, Tamas Gal, Johannes Schumann',
author_email='zaly@km3net.de, tgal@km3net.de, johannes.schumann@fau.de',
packages=['km3io'],
include_package_data=True,
platforms='any',
......@@ -33,9 +33,7 @@ setup(
install_requires=requirements,
python_requires='>=3.5',
entry_points={
'console_scripts': [
'KPrintTree=km3io.utils.kprinttree:main'
]
'console_scripts': ['KPrintTree=km3io.utils.kprinttree:main']
},
classifiers=[
'Intended Audience :: Developers',
......@@ -44,4 +42,4 @@ setup(
],
)
__author__ = 'Zineb Aly and Tamas Gal'
__author__ = 'Zineb Aly, Tamas Gal and Johannes Schumann'
......@@ -2,7 +2,7 @@ import os
import re
import unittest
from km3io.daq import DAQReader, get_rate
from km3io.daq import DAQReader, get_rate, has_udp_trailer, get_udp_max_sequence_number, get_channel_flags, get_number_udp_packets
SAMPLES_DIR = os.path.join(os.path.dirname(__file__), "samples")
......@@ -122,7 +122,6 @@ class TestDAQTimeslices(unittest.TestCase):
"daq_v1.0.0.root")).timeslices
def test_data_lengths(self):
assert 3 == len(self.ts._timeslices["default"][0])
assert 3 == len(self.ts._timeslices["L1"][0])
assert 3 == len(self.ts._timeslices["SN"][0])
with self.assertRaises(KeyError):
......@@ -130,12 +129,15 @@ class TestDAQTimeslices(unittest.TestCase):
with self.assertRaises(KeyError):
assert 0 == len(self.ts._timeslices["L0"][0])
def test_streams(self):
self.ts.stream("L1", 0)
self.ts.stream("SN", 0)
def test_reading_frames(self):
assert 8 == len(self.ts.stream("SN", 1).frames[808447186])
def test_str(self):
s = str(self.ts)
assert "default" in s
assert "L1" in s
assert "SN" in s
......@@ -173,8 +175,227 @@ class TestSummaryslices(unittest.TestCase):
def test_rates(self):
assert 3 == len(self.ss.rates)
class TestGetReate(unittest.TestCase):
def test_fifo(self):
s = self.ss.slices[0]
dct_fifo_stat = {
808981510: True,
808981523: False,
808981672: False,
808974773: False
}
for dom_id, fifo_status in dct_fifo_stat.items():
frame = s[s.dom_id == dom_id]
assert any(get_channel_flags(frame.fifo[0])) == fifo_status
def test_has_udp_trailer(self):
s = self.ss.slices[0]
dct_udp_trailer = {
806451572: True,
806455814: True,
806465101: True,
806483369: True,
806487219: True,
806487226: True,
806487231: True,
808432835: True,
808435278: True,
808447180: True,
808447186: True
}
for dom_id, udp_trailer in dct_udp_trailer.items():
frame = s[s.dom_id == dom_id]
assert has_udp_trailer(frame.fifo[0]) == udp_trailer
def test_high_rate_veto(self):
s = self.ss.slices[0]
dct_high_rate_veto = {
808489014: True,
808489117: False,
808493910: True,
808946818: True,
808951460: True,
808956908: True,
808959411: True,
808961448: True,
808961480: True,
808961504: True,
808961655: False,
808964815: False,
808964852: True,
808969848: False,
808969857: True,
808972593: True,
808972598: True,
808972698: False,
808974758: False,
808974773: True,
808974811: True,
808974972: True,
808976377: True,
808979567: False,
808979721: False,
808979729: False,
808981510: True,
808981523: True,
808981672: False,
808981812: True,
808981864: False,
808982018: False
}
for dom_id, high_rate_veto in dct_high_rate_veto.items():
frame = s[s.dom_id == dom_id]
assert any(get_channel_flags(frame.hrv[0])) == high_rate_veto
def test_max_sequence_number(self):
s = self.ss.slices[0]
dct_seq_numbers = {
808974758: 18,
808974773: 26,
808974811: 25,
808974972: 41,
808976377: 35,
808979567: 20,
808979721: 17,
808979729: 25,
808981510: 35,
808981523: 27,
808981672: 17,
808981812: 34,
808981864: 18,
808982018: 21,
808982041: 27,
808982077: 32,
808982547: 20,
808984711: 26,
808996773: 31,
808997793: 21,
809006037: 26,
809007627: 18,
809503416: 28,
809521500: 31,
809524432: 21,
809526097: 23,
809544058: 21,
809544061: 23
}
for dom_id, max_sequence_number in dct_seq_numbers.items():
frame = s[s.dom_id == dom_id]
assert get_udp_max_sequence_number(
frame.dq_status[0]) == max_sequence_number
def test_number_udp_packets(self):
s = self.ss.slices[0]
dct_n_packets = {
808451904: 27,
808451907: 22,
808469129: 20,
808472260: 21,
808472265: 22,
808488895: 20,
808488990: 20,
808489014: 28,
808489117: 22,
808493910: 26,
808946818: 23,
808951460: 37,
808956908: 33,
808959411: 36,
808961448: 28,
808961480: 24,
808961504: 28,
808961655: 20,
808964815: 20,
808964852: 28,
808969848: 21
}
for dom_id, n_udp_packets in dct_n_packets.items():
frame = s[s.dom_id == dom_id]
assert get_number_udp_packets(frame.dq_status[0]) == n_udp_packets
def test_hrv_flags(self):
s = self.ss.slices[0]
dct_hrv_flags = {
809524432: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
809526097: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
True, False, False, False, False, False, False, False, True,
False, False, False, False
],
809544058: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
809544061: [
False, True, False, False, False, True, False, False, False,
False, False, False, False, False, False, True, False, False,
False, False, False, True, False, False, False, False, False,
False, False, False, False
]
}
for dom_id, hrv_flags in dct_hrv_flags.items():
frame = s[s.dom_id == dom_id]
assert any([
a == b
for a, b in zip(get_channel_flags(frame.hrv[0]), hrv_flags)
])
def test_fifo_flags(self):
s = self.ss.slices[0]
dct_fifo_flags = {
808982547: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
808984711: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
808996773: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
808997793: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
809006037: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False,
False, False, False, False
],
808981510: [
False, False, False, False, False, False, False, False, False,
False, False, False, False, False, True, True, False, False,
False, True, False, True, True, True, True, True, True, False,
False, True, False
]
}
for dom_id, fifo_flags in dct_fifo_flags.items():
frame = s[s.dom_id == dom_id]
assert any([
a == b for a, b in zip(
get_channel_flags(frame.fifo[0]), fifo_flags)
])
class TestGetRate(unittest.TestCase):
def test_zero(self):
assert 0 == get_rate(0)
......@@ -185,4 +406,4 @@ class TestGetReate(unittest.TestCase):
def test_vectorized_input(self):
self.assertListEqual([2054], list(get_rate([1])))
self.assertListEqual([2054, 2111, 2169], list(get_rate([1,2,3])))
self.assertListEqual([2054, 2111, 2169], list(get_rate([1, 2, 3])))
......@@ -184,6 +184,16 @@ class TestOfflineReader(unittest.TestCase):
self.assertEqual(best.size, 9)
self.assertEqual(best['JGANDALF_BETA1_RAD'][:4].tolist(), JGANDALF_BETA1_RAD)
def test_reading_header(self):
# head is the supported format
head = OfflineReader(OFFLINE_NUMUCC).header
self.assertEqual(float(head['DAQ']), 394)
self.assertEqual(float(head['kcut']), 2)
# test the warning for an unsupported header format
self.assertWarns(UserWarning, self.r.header,
"Your file header has an unsupported format")
class TestOfflineEvents(unittest.TestCase):
......