Commit a08732d6 authored by Johannes Schumann

Merge branch 'python' into 'master'

Merge python environment

See merge request !1
parents c8aacdc4 5a813d41
# Filename: jobcard.py
"""
Tools for creation of GiBUU jobcards
"""
__author__ = "Johannes Schumann"
__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Johannes Schumann"
__email__ = "jschumann@km3net.de"
__status__ = "Development"
import f90nml
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
INPUT_PATH = "/opt/buuinput2019/"
PROCESS_LOOKUP = {"cc": 2, "nc": 3, "anticc": -2, "antinc": -3}
FLAVOR_LOOKUP = {"electron": 1, "muon": 2, "tau": 3}
PDGID_LOOKUP = {1: 12, 2: 14, 3: 16}
XSECTIONMODE_LOOKUP = {
"integratedSigma": 0,
"dSigmadCosThetadElepton": 1,
"dSigmadQsdElepton": 2,
"dSigmadQs": 3,
"dSigmadCosTheta": 4,
"dSigmadElepton": 5,
"dSigmaMC": 6,
"dSigmadW": 7,
"EXP_dSigmadEnu": 10,
"EXP_dSigmadCosThetadElepton": 11,
"EXP_dSigmadQsdElepton": 12,
"EXP_dSigmadQs": 13,
"EXP_dSigmadCosTheta": 14,
"EXP_dSigmadElepton": 15,
"EXP_dSigmaMC": 16,
"EXP_dSigmadW": 17,
}
class Jobcard(f90nml.Namelist):
"""
    An object to manage GiBUU jobcard properties and format them as a
    Fortran namelist
Parameters
----------
input_path: str
The input path pointing to the GiBUU lookup data which should be used
"""
    def __init__(self, *args, **kwargs):
        # Pop the keyword before it reaches the Namelist constructor
        input_path = kwargs.pop("input_path", INPUT_PATH)
        super(Jobcard, self).__init__(*args, **kwargs)
        self.input_path = str(input_path)
        self.__getitem__("input")["path_to_input"] = self.input_path
def __getitem__(self, key):
if not self.__contains__(key):
self.__setitem__(key, f90nml.Namelist())
return super(Jobcard, self).__getitem__(key)
def _clean_namelist(self):
for k, v in self.items():
if isinstance(v, f90nml.Namelist) and len(v) == 0:
self.__delitem__(k)
def __str__(self):
self._clean_namelist()
stream = StringIO()
self.write(stream)
return stream.getvalue()
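
# A minimal usage sketch (values are illustrative, not part of the module):
# a Jobcard behaves like a nested f90nml.Namelist, missing groups are created
# on first access and the whole card renders as Fortran namelist text.
#
#   >>> jc = Jobcard()
#   >>> jc["input"]["numEnsembles"] = 1000
#   >>> print(jc)      # emits "&input ... /" groups incl. path_to_input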
def read_jobcard(filepath):
return Jobcard(f90nml.read(filepath))
def generate_neutrino_jobcard_template(
process,
flavour,
energy,
target,
write_events=False,
input_path=INPUT_PATH): # pragma: no cover
"""
Generate a jobcard for neutrino interaction
Parameters
----------
process: str
Interaction channel ["CC", "NC", "antiCC", "antiNC"]
flavour: str
Flavour ["electron", "muon", "tau"]
energy: float
Initial energy of the neutrino in GeV
target: (int, int)
        (Z, A) describing the target nucleus
    write_events: bool
        Whether to write out the final events via the neutrinoAnalysis namelist
input_path: str
The input path pointing to the GiBUU lookup data which should be used
"""
    jc = Jobcard(input_path=input_path)
# NEUTRINO
jc["neutrino_induced"]["process_ID"] = PROCESS_LOOKUP[process.lower()]
jc["neutrino_induced"]["flavour_ID"] = FLAVOR_LOOKUP[flavour.lower()]
jc["neutrino_induced"]["nuXsectionMode"] = 6
jc["neutrino_induced"]["includeDIS"] = True
jc["neutrino_induced"]["includeDELTA"] = True
jc["neutrino_induced"]["includeRES"] = True
jc["neutrino_induced"]["includeQE"] = True
jc["neutrino_induced"]["include1pi"] = True
jc["neutrino_induced"]["include2p2hQE"] = True
jc["neutrino_induced"]["include2pi"] = False
jc["neutrino_induced"]["include2p2hDelta"] = False
jc["neutrino_inducted"]["printAbsorptionXS"] = True
# INPUT
jc["input"]["numTimeSteps"] = 0
jc["input"]["eventtype"] = 5
jc["input"]["numEnsembles"] = 100000
jc["input"]["delta_T"] = 0.2
jc["input"]["localEnsemble"] = True
jc["input"]["num_runs_SameEnergy"] = 1
# TARGET
jc["target"]["Z"] = target[0]
jc["target"]["A"] = target[1]
# MISC
jc["neutrinoAnalysis"]["outputEvents"] = write_events
return jc
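
# Usage sketch for the template generator (hypothetical values): a muon
# neutrino CC jobcard on an oxygen target (Z=8, A=16) at 5 GeV, written to
# disk via f90nml's Namelist.write.
#
#   >>> jc = generate_neutrino_jobcard_template("CC", "muon", 5.0, (8, 16))
#   >>> jc["input"]["numEnsembles"] = 1000        # override a default
#   >>> with open("neutrino_cc_muon.job", "w") as f:
#   ...     jc.write(f)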
# Filename: output.py
"""
IO for km3buu
"""
__author__ = "Johannes Schumann"
__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Johannes Schumann"
__email__ = "jschumann@km3net.de"
__status__ = "Development"
import re
import numpy as np
from io import StringIO
from os import listdir
from os.path import isfile, join, abspath
from tempfile import TemporaryDirectory
import awkward
import uproot
from scipy.interpolate import UnivariateSpline
from scipy.spatial.transform import Rotation
from .jobcard import Jobcard, read_jobcard, PDGID_LOOKUP
EVENT_FILENAME = "FinalEvents.dat"
ROOT_PERT_FILENAME = "EventOutput.Pert.[0-9]{8}.root"
ROOT_REAL_FILENAME = "EventOutput.Real.[0-9]{8}.root"
FLUXDESCR_FILENAME = "neutrino_initialized_energyFlux.dat"
XSECTION_FILENAMES = {"all": "neutrino_absorption_cross_section_ALL.dat"}
PARTICLE_COLUMNS = ["E", "Px", "Py", "Pz", "barcode"]
EVENTINFO_COLUMNS = [
"weight", "evType", "lepIn_E", "lepIn_Px", "lepIn_Py", "lepIn_Pz",
"lepOut_E", "lepOut_Px", "lepOut_Py", "lepOut_Pz", "nuc_E", "nuc_Px",
"nuc_Py", "nuc_Pz"
]
LHE_NU_INFO_DTYPE = np.dtype([
("type", np.int),
("weight", np.float64),
("mom_lepton_in_E", np.float64),
("mom_lepton_in_x", np.float64),
("mom_lepton_in_y", np.float64),
("mom_lepton_in_z", np.float64),
("mom_lepton_out_E", np.float64),
("mom_lepton_out_x", np.float64),
("mom_lepton_out_y", np.float64),
("mom_lepton_out_z", np.float64),
("mom_nucleon_in_E", np.float64),
("mom_nucleon_in_x", np.float64),
("mom_nucleon_in_y", np.float64),
("mom_nucleon_in_z", np.float64),
])
FLUX_INFORMATION_DTYPE = np.dtype([("energy", np.float64),
("flux", np.float64),
("events", np.float64)])
EVENT_TYPE = {
1: "QE",
32: "pi neutron-background",
33: "pi proton-background",
34: "DIS",
35: "2p2h QE",
36: "2p2h Delta",
37: "2pi background",
}
def read_nu_abs_xsection(filepath):
"""
Read the crosssections calculated by GiBUU
Parameters
----------
filepath: str
Filepath to the GiBUU output file with neutrino absorption cross-section
(neutrino_absorption_cross_section_*.dat)
"""
with open(filepath, "r") as f:
lines = f.readlines()
header = re.sub(r"\d+:|#", "", lines[0]).split()
dt = np.dtype([(field, np.float64) for field in header])
values = np.genfromtxt(StringIO(lines[-1]), dtype=dt)
return values
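
# Usage sketch (the path is illustrative): the returned value is a structured
# numpy scalar whose field names are taken from the '#' header line, e.g.
#
#   >>> xs = read_nu_abs_xsection(
#   ...     "testrun/neutrino_absorption_cross_section_ALL.dat")
#   >>> xs.dtype.names        # column labels parsed from the header
#   >>> float(xs["sum"])      # total absorption cross section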
def parse_gibuu_event_info(line):
fields = line.split()[1:]
if int(fields[0]) != 5:
raise NotImplementedError(
"Event information type %s cannot be parsed yet!" % fields[0])
else:
return np.genfromtxt(StringIO(line[3:]), dtype=LHE_NU_INFO_DTYPE)
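
# The event-info line parsed above is assumed to follow the layout encoded in
# LHE_NU_INFO_DTYPE (values illustrative):
#
#   # 5  <weight>  <lep_in E Px Py Pz>  <lep_out E Px Py Pz>  <nuc_in E Px Py Pz>
#
# Only event information of type 5 (neutrino events) is handled; any other
# type raises NotImplementedError.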
class GiBUUOutput:
def __init__(self, data_dir):
"""
Class for parsing GiBUU output files
Parameters
----------
        data_dir: str or TemporaryDirectory
            Path to the directory containing the GiBUU output files
        """
if isinstance(data_dir, TemporaryDirectory):
self._tmp_dir = data_dir
self._data_path = abspath(data_dir.name)
else:
self._data_path = abspath(data_dir)
self.output_files = [
f for f in listdir(self._data_path)
if isfile(join(self._data_path, f))
]
self._read_xsection_file()
self._read_root_output()
self._read_flux_file()
self._read_jobcard()
def _read_root_output(self):
root_pert_regex = re.compile(ROOT_PERT_FILENAME)
self.root_pert_files = list(
filter(root_pert_regex.match, self.output_files))
root_real_regex = re.compile(ROOT_REAL_FILENAME)
self.root_real_files = list(
filter(root_real_regex.match, self.output_files))
def _read_xsection_file(self):
if XSECTION_FILENAMES["all"] in self.output_files:
setattr(
self,
"xsection",
read_nu_abs_xsection(
join(self._data_path, XSECTION_FILENAMES["all"])),
)
def _read_jobcard(self):
        jobcard_regex = re.compile(r".*\.job")
jobcard_files = list(filter(jobcard_regex.match, self.output_files))
if len(jobcard_files) == 1:
self._jobcard_fname = jobcard_files[0]
self.jobcard = read_jobcard(
join(self._data_path, self._jobcard_fname))
else:
self.jobcard = None
def _read_flux_file(self):
fpath = join(self._data_path, FLUXDESCR_FILENAME)
self.flux_data = np.loadtxt(fpath, dtype=FLUX_INFORMATION_DTYPE)
self.flux_interpolation = UnivariateSpline(self.flux_data["energy"],
self.flux_data["events"])
@property
def event_weights(self):
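        """
        Weights of the single events, derived from the GiBUU perturbative
        weight and the simulated energy distribution:

            w = N_total * w_GiBUU / (f(E_nu) * n_files)

        where N_total is the integral of the interpolated event distribution
        over the simulated energy range, f(E_nu) the distribution evaluated
        at the incoming neutrino energy and n_files the number of
        perturbative ROOT output files.
        """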
event_df = self.event_info_df
gibuu_wgt = event_df["weight"]
flux = self.flux_interpolation(event_df["lepIn_E"])
energy_min = np.min(self.flux_data["energy"])
energy_max = np.max(self.flux_data["energy"])
total_events = self.flux_interpolation.integral(energy_min, energy_max)
n_files = len(self.root_pert_files)
wgt = np.divide(total_events * gibuu_wgt, flux * n_files)
return wgt
@property
def particle_df(self):
import pandas as pd
df = None
for fname in self.root_pert_files:
fobj = uproot.open(join(self._data_path, fname))
file_df = None
for col in PARTICLE_COLUMNS:
tmp = awkward.topandas(fobj["RootTuple"][col].array(),
flatten=True)
tmp.name = col
if file_df is None:
file_df = tmp
else:
file_df = pd.concat([file_df, tmp], axis=1)
if df is None:
df = file_df
else:
                new_indices = (file_df.index.levels[0] +
                               df.index.levels[0].max() + 1)
file_df.index = file_df.index.set_levels(new_indices, level=0)
df = df.append(file_df)
fobj.close()
return df
@property
def event_info_df(self):
import pandas as pd
df = None
for fname in self.root_pert_files:
fobj = uproot.open(join(self._data_path, fname))
event_data = fobj["RootTuple"]
dct = {k: event_data[k].array() for k in EVENTINFO_COLUMNS}
if df is None:
df = pd.DataFrame(dct)
else:
df = df.append(pd.DataFrame(dct), ignore_index=True)
df["By"] = 1 - df.lepOut_E / df.lepIn_E
return df
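
# Usage sketch (the directory is illustrative): wrap a finished GiBUU run
# directory and access the parsed output.
#
#   >>> run = GiBUUOutput("/path/to/gibuu/run")
#   >>> run.xsection["sum"]        # absorption cross section (if file present)
#   >>> df = run.event_info_df     # one row per perturbative event
#   >>> w = run.event_weights      # flux-normalised event weights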
def write_detector_file(gibuu_output,
ofile="gibuu.aanet.root",
can=(0, 476.5, 403.4),
livetime=3.156e7):
"""
Convert the GiBUU output to a KM3NeT MC (AANET) file
Parameters
----------
gibuu_output: GiBUUOutput
Output object which wraps the information from the GiBUU output files
ofile: str
Output filename
can: tuple
The can dimensions which are used to distribute the events
livetime: float
The data livetime
"""
import aa, ROOT
aafile = ROOT.EventFile()
aafile.set_output(ofile)
mc_event_id = 0
is_cc = False
if gibuu_output.jobcard is None:
raise EnvironmentError("No jobcard provided within the GiBUU output!")
nu_type = PDGID_LOOKUP[gibuu_output.jobcard["neutrino_induced"]
["flavor_id"]]
sec_lep_type = nu_type
ichan = abs(gibuu_output.jobcard["neutrino_induced"]["process_id"])
if ichan == 2:
is_cc = True
sec_lep_type -= 1
if gibuu_output.jobcard["neutrino_induced"]["process_id"] < 0:
nu_type *= -1
sec_lep_type *= -1
for ifile in gibuu_output.root_pert_files:
        fobj = uproot.open(join(gibuu_output._data_path, ifile))
event_data = fobj["RootTuple"]
for event in event_data.lazyarrays():
aafile.evt.clear()
aafile.evt.id = mc_event_id
aafile.evt.mc_run_id = mc_event_id
mc_event_id += 1
# Vertex Position
r = can[2] * np.sqrt(np.random.uniform(0, 1))
phi = np.random.uniform(0, 2 * np.pi)
pos_x = r * np.cos(phi)
pos_y = r * np.sin(phi)
pos_z = np.random.uniform(can[0], can[1])
vtx_pos = np.array([pos_x, pos_y, pos_z])
# Direction
phi = np.random.uniform(0, 2 * np.pi)
cos_theta = np.random.uniform(-1, 1)
sin_theta = np.sqrt(1 - cos_theta**2)
dir_x = np.cos(phi) * sin_theta
dir_y = np.sin(phi) * sin_theta
dir_z = cos_theta
direction = np.array([dir_x, dir_y, dir_z])
rotation = np.array([dir_y, -dir_x, 0])
sin_rot = np.linalg.norm(rotation)
R = Rotation.from_rotvec(rotation * np.arcsin(sin_rot) / sin_rot)
timestamp = np.random.uniform(0, livetime)
nu_in_trk = ROOT.Trk()
nu_in_trk.id = 0
nu_in_trk.mother_id = -1
nu_in_trk.type = nu_type
nu_in_trk.pos.set(*vtx_pos)
nu_in_trk.dir.set(*direction)
nu_in_trk.E = event.lepIn_E
nu_in_trk.t = timestamp
lep_out_trk = ROOT.Trk()
lep_out_trk.id = 1
lep_out_trk.mother_id = 0
lep_out_trk.type = sec_lep_type
lep_out_trk.pos.set(*vtx_pos)
mom = np.array([event.lepOut_Px, event.lepOut_Py, event.lepOut_Pz])
p_dir = R.apply(mom / np.linalg.norm(mom))
lep_out_trk.dir.set(*p_dir)
lep_out_trk.E = event.lepOut_E
lep_out_trk.t = timestamp
bjorken_y = 1.0 - float(event.lepOut_E / event.lepIn_E)
nu_in_trk.setusr('bx', -1)
nu_in_trk.setusr('by', bjorken_y)
nu_in_trk.setusr('ichan', ichan)
nu_in_trk.setusr("cc", is_cc)
aafile.evt.mc_trks.push_back(nu_in_trk)
aafile.evt.mc_trks.push_back(lep_out_trk)
for i in range(len(event.E)):
trk = ROOT.Trk()
trk.id = i + 2
mom = np.array([event.Px[i], event.Py[i], event.Pz[i]])
p_dir = R.apply(mom / np.linalg.norm(mom))
trk.pos.set(*vtx_pos)
trk.dir.set(*p_dir)
trk.mother_id = 0
trk.type = int(event.barcode[i])
trk.E = event.E[i]
trk.t = timestamp
aafile.evt.mc_trks.push_back(trk)
aafile.write()
# if mc_event_id > 100:
# break
del aafile
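
# Usage sketch (requires the aanet/ROOT python bindings to be importable;
# paths and can dimensions are illustrative):
#
#   >>> run = GiBUUOutput("/path/to/gibuu/run")
#   >>> write_detector_file(run, ofile="gibuu_neutrinos.aanet.root",
#   ...                     can=(0, 476.5, 403.4))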
#!/usr/bin/env python
# coding=utf-8
# Filename: test_ctrl.py
__author__ = "Johannes Schumann"
__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Johannes Schumann"
__email__ = "jschumann@km3net.de"
__status__ = "Development"
import unittest
import numpy as np
from km3buu.jobcard import *
from km3buu.ctrl import run_jobcard
from tempfile import TemporaryDirectory
from os import listdir
from os.path import abspath, join, dirname
from thepipe.logger import get_logger
JOBCARD_FOLDER = abspath(join(dirname(__file__), "../../jobcards"))
# class TestCTRLmisc(unittest.TestCase):
# def test_invalid_jobcard(self):
class TestCTRLbyJobcardFile(unittest.TestCase):
def setUp(self):
self.filename = join(JOBCARD_FOLDER, "examples/example.job")
self.output_dir = TemporaryDirectory()
self.retval = run_jobcard(self.filename, self.output_dir.name)
log = get_logger("ctrl.py")
log.setLevel("INFO")
def test_output(self):
assert self.retval == 0
def test_output_files_existing(self):
files = listdir(self.output_dir.name)
assert "FinalEvents.dat" in files
class TestCTRLbyJobcardObject(unittest.TestCase):
def setUp(self):
log = get_logger("ctrl.py")
log.setLevel("INFO")
self.test_jobcard = Jobcard()
# NEUTRINO
self.test_jobcard["neutrino_induced"]["process_ID"] = PROCESS_LOOKUP[
"cc"]
self.test_jobcard["neutrino_induced"]["flavor_ID"] = FLAVOR_LOOKUP[
"electron"]
self.test_jobcard["neutrino_induced"][
"nuXsectionMode"] = XSECTIONMODE_LOOKUP["dSigmaMC"]
self.test_jobcard["neutrino_induced"]["includeDIS"] = True
self.test_jobcard["neutrino_induced"]["printAbsorptionXS"] = True
self.test_jobcard["nl_SigmaMC"]["enu"] = 1
# INPUT
self.test_jobcard["input"]["numTimeSteps"] = 0
self.test_jobcard["input"]["eventtype"] = 5
self.test_jobcard["input"]["numEnsembles"] = 1
self.test_jobcard["input"]["delta_T"] = 0.2
self.test_jobcard["input"]["localEnsemble"] = True
self.test_jobcard["input"]["num_runs_SameEnergy"] = 1
self.test_jobcard["input"]["LRF_equals_CALC_frame"] = True
# TARGET
self.test_jobcard["target"]["target_Z"] = 1
self.test_jobcard["target"]["target_A"] = 1
# MISC
# self.test_jobcard["nl_neutrinoxsection"]["DISmassless"] = True
self.test_jobcard["neutrinoAnalysis"]["outputEvents"] = True
self.test_jobcard["pythia"]["PARP(91)"] = 0.44
self.output_dir = TemporaryDirectory()
self.retval = run_jobcard(self.test_jobcard, self.output_dir.name)
# raise Exception(self.test_jobcard)
def test_output(self):
assert self.retval == 0
def test_output_files_existing(self):
files = listdir(self.output_dir.name)
assert "FinalEvents.dat" in files
#!/usr/bin/env python
# coding=utf-8
# Filename: test_environment.py
__author__ = "Johannes Schumann"
__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Johannes Schumann"
__email__ = "jschumann@km3net.de"
__status__ = "Development"
import unittest
from unittest.mock import patch
from km3buu.environment import *
from os.path import dirname, join
from spython.main import Client
from km3buu import DOCKER_URL, IMAGE_NAME
class TestBuild(unittest.TestCase):
def test_wrong_dir_path(self):
wrong_path = "foobar"
        with self.assertRaises(OSError) as ctx:
            build_image(wrong_path)
        self.assertEqual(str(ctx.exception), "Directory not found!")
@patch.object(Client, 'build', return_value=123)
def test_build_cmd(self, function):
existing_path = dirname(__file__)
assert build_image(existing_path) == 123
expected_image_path = join(existing_path, IMAGE_NAME)
function.assert_called_once_with(DOCKER_URL,
image=expected_image_path,
sudo=False,
ext="simg")
#!/usr/bin/env python
# coding=utf-8
# Filename: test_jobcard.py
__author__ = "Johannes Schumann"
__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Johannes Schumann"
__email__ = "jschumann@km3net.de"
__status__ = "Development"
import unittest
import numpy as np
from km3buu.jobcard import Jobcard, INPUT_PATH
class TestJobcard(unittest.TestCase):
def setUp(self):
self.test_jobcard = Jobcard()
# Insert some test elements
self.test_jobcard["ABC"]["def"] = 42
def test_input_path(self):
expected_line = "path_to_input = '%s'" % INPUT_PATH
ctnt = str(self.test_jobcard)
group_start = ctnt.find("&input")
group_end = ctnt.find("/\n", group_start)
assert ctnt[group_start:group_end].find(expected_line) != -1
def test_elements(self):
ctnt = str(self.test_jobcard)
expected_line = "def = 42"
group_start = ctnt.find("&abc")
group_end = ctnt.find("/", group_start)
print(ctnt)
assert ctnt[group_start:group_end].find(expected_line) != -1
def test_remove_elements(self):
del self.test_jobcard["ABC"]["def"]
ctnt = str(self.test_jobcard)
expected_line = "def = 42"
assert ctnt.find("&ABC") == -1
assert ctnt.find(expected_line) == -1
#!/usr/bin/env python
# coding=utf-8
# Filename: test_output.py
__author__ = "Johannes Schumann"
__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
__credits__ = []
__license__ = "MIT"
__maintainer__ = "Johannes Schumann"
__email__ = "jschumann@km3net.de"
__status__ = "Development"
import unittest
import numpy as np
from km3buu.output import *
from os import listdir
from os.path import abspath, join, dirname
from km3net_testdata import data_path
TESTDATA_DIR = data_path("gibuu")
class TestXSection(unittest.TestCase):
def test_xsection_all(self):
filename = join(TESTDATA_DIR, XSECTION_FILENAMES["all"])
xsection = read_nu_abs_xsection(filename)
self.assertAlmostEqual(xsection['var'], 58.631)
self.assertAlmostEqual(xsection['sum'], 8.0929)
self.assertAlmostEqual(xsection['Delta'], 0.26805)
self.assertAlmostEqual(xsection['highRES'], 0.14248)
class TestGiBUUOutput(unittest.TestCase):
def setUp(self):
self.output = GiBUUOutput(TESTDATA_DIR)
def test_attr(self):
assert hasattr(self.output, "event_info_df")
assert hasattr(self.output, "particle_df")
[pytest]
docstyle_convention = numpy
junit_family=xunit1
numpydoc
pydocstyle
pytest
pytest-cov
pytest-flake8
pytest-pylint
pytest-watch
sphinx-rtd-theme
sphinx
sphinxcontrib-napoleon
sphinxcontrib-programoutput
sphinxcontrib-websupport
sphinx-autoapi
setuptools_scm
yapf>=0.25
km3net-testdata>=0.2.11
using CSV
"""
read_final_events(filepath::AbstractString)
Function for reading the final events from the GiBUU output
# Arguments
- `filepath::AbstractString`: filepath to the FinalEvents.dat
"""
function read_final_events(filepath::AbstractString)
file = open(filepath)
header = readline(file)
close(file)
raw_col_names = split(header)[2:end]
col_names = [String.(split(col,":"))[end] for col in raw_col_names]
CSV.read(filepath,
header=col_names,
delim=' ',
comment="#",
ignorerepeated=true,
types=[Int32,
Int32,
Int32,
Int32,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Float64,
Int32,
Int32,
Float64
])
end
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Filename: setup.py
"""
KM3BUU setup script.
"""
import os
import tempfile
from setuptools import setup, find_packages
PACKAGE_NAME = 'km3buu'
URL = 'https://git.km3net.de/simulation/km3buu'
DESCRIPTION = 'GiBUU tools for KM3NeT'
__author__ = 'Johannes Schumann'
__email__ = 'jschumann@km3net.de'
with open('requirements.txt') as fobj:
REQUIREMENTS = [l.strip() for l in fobj.readlines()]
with open('requirements-dev.txt') as fobj:
DEV_REQUIREMENTS = [l.strip() for l in fobj.readlines()]
setup(
name=PACKAGE_NAME,
url=URL,
description=DESCRIPTION,
author=__author__,
author_email=__email__,
packages=find_packages(),
include_package_data=True,
platforms='any',
setup_requires=['setuptools_scm'],
use_scm_version={
'write_to': '{}/version.txt'.format(PACKAGE_NAME),
'tag_regex': r'^(?P<prefix>v)?(?P<version>[^\+]+)(?P<suffix>.*)?$',
},
install_requires=REQUIREMENTS,
extras_require={'dev': DEV_REQUIREMENTS},
python_requires='>=3.0',
entry_points={'console_scripts': ['km3buu=km3buu.cmd:main']},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Programming Language :: Python',
],
)