diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000000000000000000000000000000000000..82def346f525d4d53ef0312111a8ee76e9b04880
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,17 @@
+[run]
+source = km3buu
+
+[report]
+include =
+    km3buu/*
+omit =
+    */tests/*
+exclude_lines =
+    pragma: no cover
+    raise AssertionError
+    raise NotImplementedError
+    if 0:
+    if __name__ == .__main__.:
+    if self.debug:
+    if settings.DEBUG
+    def __repr__
diff --git a/.gitignore b/.gitignore
index 895472b1ebeefdcdd6c1e826aebecf808ff9e509..80c82d0c089f097cabee124d463b4d71bf441720 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,45 @@
 # GiBUU
 *.dat
 output/
+
+# Version info for PyPI
+km3buu/version.py
+
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*.pyxbldc
+
+# Distribution / packaging
+.Python
+env/
+bin/
+build/
+develop-eggs/
+dist/
+eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+*.egg-info/
+.installed.cfg
+*.egg
+.eggs
+
+# venv, pyenv tmp
+.python-version
+venv
+
+# Sphinx documentation
+doc/_build/
+doc/auto_examples/
+doc/modules/
+doc/api
+
+# Test reports
+junit*.xml
+reports
+
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 375f1d08048d7fd11bf8f2c4fdc93cbcd841f178..9489755f4d3eb7d03af1892cf9b969ebd135dc0b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,40 +1,133 @@
+image: docker.km3net.de/base/singularity-py3:3.5.3
+
 variables:
+    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
     DOCKER_HOST: tcp://docker:2375
     DOCKER_DRIVER: overlay2
     CONTAINER_TEST_IMAGE: docker.km3net.de/simulation/km3buu:$CI_COMMIT_REF_SLUG
     CONTAINER_RELEASE_IMAGE: docker.km3net.de/simulation/km3buu:$CI_COMMIT_TAG
     CONTAINER_LATEST_IMAGE: docker.km3net.de/simulation/km3buu:latest
 
+cache:
+  paths:
+   - .cache/pip
+   - venv/
+   - GiBUU.simg
+  key: "$CI_COMMIT_REF_SLUG"
+
 stages:
+    - test
+    - coverage
     - docker
+    - reset_cache_image
     - release
+    - doc
+
+reset_test_image:
+    stage: reset_cache_image
+    cache:
+        paths:
+            - GiBUU.simg
+    script:
+        - rm GiBUU.simg
+    only:
+        - tags
+
+.virtualenv_template: &virtualenv_definition |
+  python -V
+  pip install virtualenv
+  virtualenv venv
+  source venv/bin/activate
+  pip install -U pip setuptools yapf
+  if [ ! -f GiBUU.simg ]; then make buildremote; fi
+  make install-dev
+  pip list
+
+.junit_template: &junit_definition
+    artifacts:
+      reports:
+        junit: "reports/junit*.xml"
+
+
+test:
+    image: docker.km3net.de/base/singularity-py3:3.5.3
+    stage: test
+    tags:
+        - docker
+    script:
+        - *virtualenv_definition
+        - make test
+    <<: *junit_definition
+
+code-style:
+    image: docker.km3net.de/base/singularity-py3:3.5.3
+    stage: test
+    script:
+        - *virtualenv_definition
+        - yapf -r -d -e "venv" ./km3buu
+    allow_failure: true
+
+coverage:
+    image: docker.km3net.de/base/singularity-py3:3.5.3
+    stage: coverage
+    tags:
+        - docker
+    script:
+        - *virtualenv_definition
+        - "make test-cov|grep TOTAL| awk '{printf \"COVERAGE: %.2f%%\", (1-$3/$2)*100 }'"
+    coverage: '/COVERAGE:\s*([0-9]*\.[0-9]*%)/'
+    #     - make test-cov
+    # coverage: '/TOTAL.+ ([0-9]{1,3}%)/'
+    artifacts:
+        paths:
+            - reports/coverage
+
 
 docker:
-   image: docker:stable
-   services:
-     - docker:dind
-   stage: docker
-   script:
-     - docker build --pull -t $CONTAINER_TEST_IMAGE .
-     - docker push $CONTAINER_TEST_IMAGE
-   tags:
-     - docker
-   only:
-     - tags
+  image: docker:stable
+  services:
+    - docker:dind
+  stage: docker
+  script:
+    - docker build --pull -t $CONTAINER_TEST_IMAGE .
+    - docker push $CONTAINER_TEST_IMAGE
+  tags:
+    - docker
+  only:
+    - tags
 
 release-image:
-   image: docker:stable
-   services:
-     - docker:dind
-   stage: release
-   script:
-     - docker pull $CONTAINER_TEST_IMAGE
-     - docker tag $CONTAINER_TEST_IMAGE $CONTAINER_RELEASE_IMAGE
-     - docker tag $CONTAINER_TEST_IMAGE $CONTAINER_LATEST_IMAGE
-     - docker push $CONTAINER_RELEASE_IMAGE
-     - docker push $CONTAINER_LATEST_IMAGE
-   tags:
-     - docker
-   only:
-     - tags
+  image: docker:stable
+  services:
+    - docker:dind
+  stage: release
+  script:
+    - docker pull $CONTAINER_TEST_IMAGE
+    - docker tag $CONTAINER_TEST_IMAGE $CONTAINER_RELEASE_IMAGE
+    - docker tag $CONTAINER_TEST_IMAGE $CONTAINER_LATEST_IMAGE
+    - docker push $CONTAINER_RELEASE_IMAGE
+    - docker push $CONTAINER_LATEST_IMAGE
+  tags:
+    - docker
+  only:
+    - tags
+
+pages:
+    image: docker.km3net.de/base/python:3.6
+    stage: doc
+    script:
+        - make install-dev
+        - cd doc && make clean && cd ..
+        - make doc
+        - mv doc/_build/html public/
+        - mv reports/coverage public/coverage
+    artifacts:
+        paths:
+            - public
+    cache: {}
+    only:
+        - tags
+        - master
+        - python
+
 
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
new file mode 100644
index 0000000000000000000000000000000000000000..38daa87607f194b4f499f47353cf291674537e39
--- /dev/null
+++ b/CHANGELOG.rst
@@ -0,0 +1,12 @@
+Unreleased changes
+------------------
+
+* None
+
+
+Version 0
+---------
+
+0.2.0 / 2020-02-08
+~~~~~~~~~~~~~~~~~~
+* Initial commit
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000000000000000000000000000000000000..d900199a131b24b2e6885bcee2caa4a2a73aaed6
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,4 @@
+Contributing
+------------
+
+Do it!
diff --git a/Makefile b/Makefile
index e13bd2ba16b175c6441fcd1fb7eb074b16bc2825..d537c66ba2da0d3999ef47f934f94a27e963bf70 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,6 @@
+PKGNAME=km3buu
+ALLNAMES = $(PKGNAME)
+
 export REPO_OUTPUT_DIR := output
 export REPO_JOBCARDS_DIR := jobcards
 export CONTAINER_OUTPUT_DIR := /opt/output
@@ -8,7 +11,6 @@ default: run
 build: km3buu.Singularity
 	sudo singularity build GiBUU.simg km3buu.Singularity 
 
-
 run: GiBUU.simg
 	@if [ ! -d "jobcards/${CARDSET}" ];then \
 	    exit 1; \
@@ -32,5 +34,29 @@ buildremote:
 
 clean:
 	@rm -rf output
+	python setup.py clean --all
+
+### PYTHON ###
+install:
+	pip install .
+
+install-dev:
+	pip install -e ".[dev]"
+
+test:
+	python -m pytest --junitxml=./reports/junit.xml -o junit_suite_name=$(PKGNAME) $(PKGNAME)
+
+test-cov:
+	python -m pytest --cov ./ --cov-report term-missing --cov-report xml:reports/coverage.xml --cov-report html:reports/coverage $(ALLNAMES)
+
+flake8: 
+	python -m pytest --flake8
+
+docstyle: 
+	python -m pytest --pydocstyle
 
+doc:
+	cd doc && make html
+	cd ..
 
+.PHONY: install install-dev doc clean test test-cov flake8 docstyle buildremote
diff --git a/README.md b/README.md
deleted file mode 100644
index 4acd73c831dd3debdfd57b35bb6d7067495c2a66..0000000000000000000000000000000000000000
--- a/README.md
+++ /dev/null
@@ -1,39 +0,0 @@
-# KM3BUU
-
-The KM3BUU project is an integrated environment for the GiBUU studies within the KM3NeT experiment.
-
-## Installation
-The project is based on images using `singularity`, for which version 3 or higher (e.g. [v3.4](https://sylabs.io/guides/3.4/user-guide/)) is required. This is done due to the intention to provide a comparable installation on all systems and thus make the results 
-easily reproducible. The main project control is based on `make`.
-In order to apply installation commands presented within this section, clone this repository and change to the project directory:
-```
-git clone https://git.km3net.de/simulation/km3buu
-cd km3buu
-```
-
-
-### Local Machine
-By "Local Machine" a computer where are root (administrative) privileges are available is 
-meant. These root privileges are required to build the singularity image by yourself. In order to start the build, run the following `make` command:
-```
-make build
-```
-
-### Compute Cluster
-In order to make this project also usable in a non-root environment, the Image is also provided via the KM3NeT Docker-Server. Within KM3NeT computing infrastructure this is the case for the lyon compute cluster, thus this case is customised for this environment.
-
-In order to build the singularity image based on the remote image, run the following `make` command:
-```
-make buildremote
-```
-
-## Structure & Usage
-
-The used GiBUU jobcards are located in a sub-folder within the jobcards folder.
-Each sub-folder represents a set of jobcards, which can be processed by:
-
-```
-make run CARDSET=examples
-```
-
-This command runs all jobcards within the `jobcards/examples` folder and writes the output it to the folder `output`. The folder structure is applied from the `jobcards`folder.
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000000000000000000000000000000000000..a05b2136519f4907f16be9703e1c583ba7914801
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,63 @@
+KM3BUU
+======
+
+The KM3BUU project is an integrated environment for the GiBUU studies
+within the KM3NeT experiment.
+
+Installation
+------------
+
+The project is based on ``singularity`` images, for which version 3 or
+higher (e.g. `v3.4 <https://sylabs.io/guides/3.4/user-guide/>`__) is
+required. Images are used to provide a comparable installation on all
+systems and thus make the results easily reproducible. The main project
+control is based on ``make``. To apply the installation commands
+presented in this section, clone this repository and change to the
+project directory:
+
+::
+
+   git clone https://git.km3net.de/simulation/km3buu
+   cd km3buu
+
+Local Machine
+~~~~~~~~~~~~~
+
+“Local Machine” here means a computer on which root (administrative)
+privileges are available. These privileges are required to build the
+singularity image yourself. To start the build, run the following
+``make`` command:
+
+::
+
+   make build
+
+Compute Cluster
+~~~~~~~~~~~~~~~
+
+To make this project usable in a non-root environment as well, the image
+is also provided via the KM3NeT Docker server. Within the KM3NeT
+computing infrastructure this applies to the Lyon compute cluster, so
+this workflow is tailored to that environment.
+
+In order to build the singularity image based on the remote image, run
+the following ``make`` command:
+
+::
+
+   make buildremote
+
+Structure & Usage
+-----------------
+
+The GiBUU jobcards in use are located in sub-folders of the ``jobcards``
+folder. Each sub-folder represents a set of jobcards, which can be
+processed by:
+
+::
+
+   make run CARDSET=examples
+
+This command runs all jobcards within the ``jobcards/examples`` folder
+and writes the output to the ``output`` folder. The folder structure
+mirrors the one of the ``jobcards`` folder.
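+
+The same jobcards can also be run through the Python interface that this
+repository provides. A minimal sketch (paths are illustrative):
+
+::
+
+   from km3buu.ctrl import run_jobcard
+
+   # run a single jobcard and write the GiBUU output to an existing directory
+   run_jobcard("jobcards/examples/example.job", "output/examples")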
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..1eb8703bb98ee0b7486bb410e82571107353a290
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,195 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER	      =
+BUILDDIR      = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4	= -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS	= -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS	= $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: default help clean html html-noplot dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+default: html
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html	    to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json	    to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub	    to make an epub"
+	@echo "  latex	    to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text	    to make text files"
+	@echo "  man	    to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info	    to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml	    to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+	@echo "  view       to open HTML output in browser"
+
+doc-dependencies:
+	pip install -Ur ../requirements.txt
+
+clean:
+	rm -rf $(BUILDDIR)/*
+	rm -rf auto_examples/
+	rm -rf modules/generated/*
+	rm -rf api/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+html-noplot:
+	$(SPHINXBUILD) -D plot_gallery=0 -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/KM3Pipe.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/KM3Pipe.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/KM3Pipe"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/KM3Pipe"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: view
+view:
+	$(BROWSER) $(BUILDDIR)/html/index.html
diff --git a/doc/changelog.rst b/doc/changelog.rst
new file mode 100644
index 0000000000000000000000000000000000000000..4de03af31303a9f74b6205d9660e492bff7653e7
--- /dev/null
+++ b/doc/changelog.rst
@@ -0,0 +1,4 @@
+Changelog
+=========
+
+.. include:: ../CHANGELOG.rst
diff --git a/doc/conf.py b/doc/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..58d530ba8095e369539e09a6077e8ae8cc6736cb
--- /dev/null
+++ b/doc/conf.py
@@ -0,0 +1,273 @@
+# -*- coding: utf-8 -*-
+#
+# KM3Pipe documentation build configuration file, created by
+# sphinx-quickstart on Sat Oct  4 19:16:43 2014.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+from datetime import date
+import sphinx_rtd_theme
+from pkg_resources import get_distribution
+
+import km3buu
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.autosummary',
+    'sphinx.ext.doctest',
+    'sphinx.ext.imgmath',
+    'sphinx.ext.viewcode',
+    'autoapi.extension',
+    'numpydoc',
+    'sphinxcontrib.programoutput',
+]
+
+autosummary_generate = True
+
+# Document Python Code
+autoapi_type = 'python'
+autoapi_dirs = ['../km3buu']
+autoapi_options = [
+    'members', 'undoc-members'
+    # , 'private-members', 'special-members'
+]
+autoapi_ignore = ["*/tests/*", "*test_*.py", "*/doc/conf.py"]
+autoapi_include_summaries = True
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+# The full version, including alpha/beta/rc tags.
+release = get_distribution('km3buu').version
+# The short X.Y version.
+version = '.'.join(release.split('.')[:2])
+short_version = '.'.join(version.split('.')[:2])
+
+
+# General information about the project.
+project = "KM3BUU {}".format(short_version)
+
+copyright = u'{0}, Johannes Schumann'.format(date.today().year)
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build', '_templates', '**.ipynb_checkpoints']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# -- Options for HTML output ----------------------------------------------
+
+html_theme = 'sphinx_rtd_theme'
+html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+# html_theme_options = {}
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+html_title = "KM3BUU {}".format(short_version)
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+# html_short_title = 'Home'
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = "_static/foo_logo_small_white.png"
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+html_sidebars = {
+    '**': ['globaltoc.html', 'searchbox.html'],
+}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'KM3BUUdoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    # 'papersize': 'letterpaper',
+
+    # The font size ('10pt', '11pt' or '12pt').
+    # 'pointsize': '10pt',
+
+    # Additional stuff for the LaTeX preamble.
+    # 'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    ('index', 'KM3BUU.tex', u'KM3BUU Documentation', u'Johannes Schumann',
+     'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [('index', 'km3buu', u'KM3BUU Documentation',
+              [u'Johannes Schumann'], 1)]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = []
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+# don't show the method summary twice
+numpydoc_show_class_members = False
diff --git a/doc/contribute.rst b/doc/contribute.rst
new file mode 100644
index 0000000000000000000000000000000000000000..19b90657d8a6a212534c71c9c023f5eef097f58a
--- /dev/null
+++ b/doc/contribute.rst
@@ -0,0 +1,5 @@
+Contribute
+==========
+
+You want to hack new features into ``km3buu`` or are just here to fix a
+bug? Great!
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..87e20a8f1a1a055d0758d2028b4e1aba55488c28
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,19 @@
+.. include:: ../README.rst
+
+.. toctree::
+    :hidden:
+    :titlesonly:
+
+    self
+
+.. toctree::
+    :maxdepth: 2
+
+    user_guide
+    contribute
+    changelog
+
+    Code Coverage <https://simulation.pages.km3net.de/km3buu/coverage>
+    Source (Git) <https://git.km3net.de/simulation/km3buu>
+
+* :ref:`genindex`
diff --git a/doc/user_guide.rst b/doc/user_guide.rst
new file mode 100644
index 0000000000000000000000000000000000000000..47a9ee63dbb8d214f660d0060c280589cbcc347a
--- /dev/null
+++ b/doc/user_guide.rst
@@ -0,0 +1,4 @@
+User Guide
+==========
+
+Install ``km3buu`` and have fun!
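+
+A first sketch using the jobcard helpers (all values are illustrative):
+
+::
+
+   from km3buu.jobcard import generate_neutrino_jobcard_template
+
+   # 10 GeV muon-neutrino CC interactions on an oxygen target (Z=8, A=16)
+   jc = generate_neutrino_jobcard_template("CC", "muon", 10.0, (8, 16))
+   print(jc)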
diff --git a/jobcards/examples/example.job b/jobcards/examples/example.job
index f7935955e6f64b4d06239b8b9910f15f3364e5ae..9a36c16efe8e0d2dbf358fb73610dd5beb3f66c3 100644
--- a/jobcards/examples/example.job
+++ b/jobcards/examples/example.job
@@ -3,7 +3,7 @@
 	flavor_ID = 2 		! 1:electron, 2:muon, 3:tau
 	nuXsectionMode = 6 	! 6: dSigmaMC
 	includeDIS = .true. 	! enables DIS events
-	printAbsorptionXS = T
+	printAbsorptionXS = .true.
 /
 
 &target
@@ -44,7 +44,7 @@
 
 
 &neutrinoAnalysis
-	outputEvents = .true	! output list of events and
+	outputEvents = .true.	! output list of events and
 				! all outgoing particles in
 				! each event to the file 
 				! FinalEvents.dat
diff --git a/km3buu/__init__.py b/km3buu/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8e383a1c139601de965bc41eba2c462bc9ff1053
--- /dev/null
+++ b/km3buu/__init__.py
@@ -0,0 +1,10 @@
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+IMAGE_NAME = "GiBUU.simg"
+DOCKER_URL = "docker://docker.km3net.de/simulation/km3buu:latest"
diff --git a/km3buu/__version__.py b/km3buu/__version__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5996d319ed7c18627a66a822f2ff6b14cfdd483f
--- /dev/null
+++ b/km3buu/__version__.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Filename: __version__.py
+"""
+PEP 386 compliant version info.
+
+    (major, minor, micro, alpha/beta/rc/final, #)
+    (1, 1, 2, 'alpha', 0) => "1.1.2.dev"
+    (1, 2, 0, 'beta', 2) => "1.2b2"
+
+"""
+from os.path import dirname, realpath, join
+
+from setuptools_scm import get_version
+
+version = "unknown version"
+
+try:
+    version = get_version(root="..", relative_to=__file__)
+except LookupError:
+    with open(join(realpath(dirname(__file__)), "version.txt"), "r") as fobj:
+        version = fobj.read()
diff --git a/km3buu/config.py b/km3buu/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b554f0638d6bffae91cd007808dc18042803558
--- /dev/null
+++ b/km3buu/config.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Filename: config.py
+# Author: Johannes Schumann <jschumann@km3net.de>
+"""
+
+
+"""
+import os
+import click
+from os.path import isfile, isdir, join, dirname, abspath
+from configparser import ConfigParser, Error, NoOptionError, NoSectionError
+from thepipe.logger import get_logger
+from . import IMAGE_NAME
+from .environment import build_image
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+CONFIG_PATH = os.path.expanduser("~/.km3buu/config")
+
+log = get_logger(__name__)
+
+
+class Config(object):
+    def __init__(self, config_path=CONFIG_PATH):
+        self.config = ConfigParser()
+        self._config_path = config_path
+        if isfile(self._config_path):
+            self.config.read(self._config_path)
+        else:
+            os.makedirs(dirname(CONFIG_PATH), exist_ok=True)
+
+    def set(self, section, key, value):
+        if section not in self.config.sections():
+            self.config.add_section(section)
+        self.config.set(section, key, value)
+        with open(self._config_path, "w") as f:
+            self.config.write(f)
+
+    def get(self, section, key, default=None):
+        try:
+            value = self.config.get(section, key)
+            try:
+                return float(value)
+            except ValueError:
+                return value
+        except (NoOptionError, NoSectionError):
+            return default
+
+    @property
+    def gibuu_image_path(self):
+        section = "GiBUU"
+        key = "image_path"
+        image_path = self.get(section, key)
+        if image_path is None or not isfile(image_path):
+            dev_path = abspath(join(dirname(__file__), os.pardir, IMAGE_NAME))
+            if isfile(dev_path):
+                image_path = dev_path
+            elif click.confirm("Is the GiBUU image already available?", default=False):
+                image_path = click.prompt(
+                    "GiBUU image path?", type=click.Path(exists=True, dir_okay=False)
+                )
+            elif click.confirm("Install image from remote?", default=True):
+                default_dir = join(os.environ["HOME"], ".km3buu")
+                image_dir = click.prompt(
+                    "GiBUU image path (default: ~/.km3buu) ?",
+                    type=click.Path(exists=True, file_okay=False),
+                    default=default_dir,
+                )
+                image_path = build_image(image_dir)
+            self.set(section, key, image_path)
+        return image_path
+
+    @gibuu_image_path.setter
+    def gibuu_image_path(self, value):
+        section = "GiBUU"
+        key = "image_path"
+        self.set(section, key, value)
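+
+
+# Usage sketch (prompts may appear interactively if no image is configured yet):
+#
+#     >>> from km3buu.config import Config
+#     >>> Config().gibuu_image_path  # resolves or installs the GiBUU image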
diff --git a/km3buu/ctrl.py b/km3buu/ctrl.py
new file mode 100644
index 0000000000000000000000000000000000000000..14dec21323ba5bf258c8bc93f07e1ec8c8705ce1
--- /dev/null
+++ b/km3buu/ctrl.py
@@ -0,0 +1,102 @@
+# Filename: ctrl.py
+"""
+Run and control tools for GiBUU
+
+"""
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import os
+from spython.main import Client
+from os.path import join, abspath, basename, isdir, isfile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
+from thepipe.logger import get_logger
+
+from . import IMAGE_NAME
+from .config import Config
+from .jobcard import Jobcard, read_jobcard
+from .environment import is_singularity_version_greater, MIN_SINGULARITY_VERSION
+
+log = get_logger(basename(__file__))
+
+if not is_singularity_version_greater(
+        MIN_SINGULARITY_VERSION):  # pragma: no cover
+    log.error("Singularity version lower than %s" % MIN_SINGULARITY_VERSION)
+    raise OSError("Singularity version below %s" % MIN_SINGULARITY_VERSION)
+
+GIBUU_SHELL = """
+#!/bin/bash
+
+export LD_LIBRARY_PATH=/usr/local/lib
+
+if [ -z "$CONTAINER_GIBUU_EXEC+x" ];
+then
+    echo "No GIBUU executable provided via CONTAINER_GIBUU_EXEC";
+    exit 1
+fi;
+
+cd {0};
+
+$CONTAINER_GIBUU_EXEC < {1};
+"""
+
+
+def run_jobcard(jobcard, outdir, fluxfile=None):
+    """
+    Run the given jobcard in the GiBUU container environment.
+
+    Parameters
+    ----------
+    jobcard: str, km3buu.JobCard
+        The jobcard which should be run, which can be an instance
+        of a jobcard object or a path to a jobcard
+    outdir: str 
+        The path to the directory the output should be written to.
+    fluxfile: str
+        Filepath of the custom flux file if initNeutrino/nuExp = 99
+    """
+    input_dir = TemporaryDirectory()
+    outdir = abspath(outdir)
+    log.info("Create temporary file for jobcard")
+    jobcard_fpath = join(input_dir.name, "tmp.job")
+
+    if isinstance(jobcard, str) and isfile(jobcard):
+        jobcard = read_jobcard(jobcard)
+
+    if "neutrino_induced" in jobcard and "nuexp" in jobcard[
+            "neutrino_induced"] and jobcard["neutrino_induced"]["nuexp"] == 99:
+        if fluxfile is None or not isfile(fluxfile):
+            raise IOError("Fluxfile not found!")
+        tmp_fluxfile = join(input_dir.name, basename(fluxfile))
+        os.system("cp %s %s" % (fluxfile, tmp_fluxfile))
+        log.info("Set FileNameFlux to: %s" % tmp_fluxfile)
+        jobcard["neutrino_induced"]["FileNameflux"] = tmp_fluxfile
+    if isinstance(jobcard, Jobcard):
+        with open(jobcard_fpath, "w") as f:
+            f.write(str(jobcard))
+    else:
+        log.error("No valid jobcard reference given: %s" % jobcard)
+        raise TypeError("No valid jobcard reference given: %s" % jobcard)
+    log.info("Create temporary file for associated runscript")
+    script_fpath = join(input_dir.name, "run.sh")
+    with open(script_fpath, "w") as f:
+        ctnt = GIBUU_SHELL.format(outdir, jobcard_fpath)
+        f.write(ctnt)
+    output = Client.execute(
+        Config().gibuu_image_path,
+        ["/bin/sh", script_fpath],
+        bind=[outdir, input_dir.name],
+        return_result=True,
+    )
+    msg = output["message"]
+    if isinstance(msg, str):
+        log.info("GiBUU output:\n %s" % msg)
+    else:
+        log.info("GiBUU output:\n %s" % msg[0])
+        log.error("GiBUU stacktrace:\n%s" % msg[1])
+    return output["return_code"]
diff --git a/km3buu/environment.py b/km3buu/environment.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a4c3f03f4b9b05b6263086e16d244a657719cdc
--- /dev/null
+++ b/km3buu/environment.py
@@ -0,0 +1,39 @@
+# Filename: environment.py
+"""
+Core functions for the package environment
+
+"""
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import os
+from spython.main import Client
+from spython.utils import get_singularity_version
+from os.path import join, isdir, basename
+from thepipe.logger import get_logger
+from distutils.version import LooseVersion
+
+from . import IMAGE_NAME, DOCKER_URL
+
+log = get_logger(basename(__file__))
+
+MIN_SINGULARITY_VERSION = "3.3"
+
+
+def is_singularity_version_greater(min_version):  # pragma: no cover
+    singularity_version = LooseVersion(get_singularity_version().split()[-1])
+    retval = singularity_version > LooseVersion(min_version)
+    return retval
+
+
+def build_image(output_dir):
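+    """Build the GiBUU singularity image in ``output_dir``, pulling from the
+    KM3NeT Docker registry, and forward the return value of ``Client.build``."""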
+    if not isdir(output_dir):
+        raise OSError("Directory not found!")
+    image_path = join(output_dir, IMAGE_NAME)
+    return Client.build(DOCKER_URL, image=image_path, sudo=False, ext="simg")
diff --git a/km3buu/jobcard.py b/km3buu/jobcard.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa2ee6dd4d46623edcf3034051d617761d51e884
--- /dev/null
+++ b/km3buu/jobcard.py
@@ -0,0 +1,136 @@
+# Filename: jobcard.py
+"""
+Tools for creation of GiBUU jobcards
+
+"""
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import f90nml
+
+try:
+    from StringIO import StringIO
+except ImportError:
+    from io import StringIO
+
+INPUT_PATH = "/opt/buuinput2019/"
+
+PROCESS_LOOKUP = {"cc": 2, "nc": 3, "anticc": -2, "antinc": -3}
+FLAVOR_LOOKUP = {"electron": 1, "muon": 2, "tau": 3}
+PDGID_LOOKUP = {1: 12, 2: 14, 3: 16}
+XSECTIONMODE_LOOKUP = {
+    "integratedSigma": 0,
+    "dSigmadCosThetadElepton": 1,
+    "dSigmadQsdElepton": 2,
+    "dSigmadQs": 3,
+    "dSigmadCosTheta": 4,
+    "dSigmadElepton": 5,
+    "dSigmaMC": 6,
+    "dSigmadW": 7,
+    "EXP_dSigmadEnu": 10,
+    "EXP_dSigmadCosThetadElepton": 11,
+    "EXP_dSigmadQsdElepton": 12,
+    "EXP_dSigmadQs": 13,
+    "EXP_dSigmadCosTheta": 14,
+    "EXP_dSigmadElepton": 15,
+    "EXP_dSigmaMC": 16,
+    "EXP_dSigmadW": 17,
+}
+
+
+class Jobcard(f90nml.Namelist):
+    """
+    An object to manage GiBUU jobcard properties and format them
+
+    Parameters
+    ----------
+    input_path: str
+        The input path pointing to the GiBUU lookup data which should be used
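+
+    Examples
+    --------
+    A minimal sketch (the keys shown are illustrative, not a complete card)::
+
+        >>> jc = Jobcard()
+        >>> jc["input"]["numTimeSteps"] = 0
+        >>> print(jc)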
+    """
+    def __init__(self, *args, **kwargs):
+        # pop our own keyword argument before forwarding the rest to f90nml.Namelist
+        input_path = kwargs.pop("input_path", INPUT_PATH)
+        super(Jobcard, self).__init__(*args, **kwargs)
+        self.input_path = str(input_path)
+        self.__getitem__("input")["path_to_input"] = self.input_path
+
+    def __getitem__(self, key):
+        if not self.__contains__(key):
+            self.__setitem__(key, f90nml.Namelist())
+        return super(Jobcard, self).__getitem__(key)
+
+    def _clean_namelist(self):
+        for k, v in self.items():
+            if isinstance(v, f90nml.Namelist) and len(v) == 0:
+                self.__delitem__(k)
+
+    def __str__(self):
+        self._clean_namelist()
+        stream = StringIO()
+        self.write(stream)
+        return stream.getvalue()
+
+
+def read_jobcard(filepath):
+    return Jobcard(f90nml.read(filepath))
+
+
+def generate_neutrino_jobcard_template(
+    process,
+    flavour,
+    energy,
+    target,
+    write_events=False,
+    input_path=INPUT_PATH):  # pragma: no cover
+    """
+    Generate a jobcard for neutrino interaction
+
+    Parameters
+    ----------
+    process: str
+        Interaction channel ["CC", "NC", "antiCC", "antiNC"]
+    flavour: str
+        Flavour ["electron", "muon", "tau"]
+    energy: float
+        Initial energy of the neutrino in GeV
+    target: (int, int)
+        (Z, A) describing the target nucleus
+    write_events: bool
+        Write the event output (FinalEvents.dat) via the neutrinoAnalysis namelist
+    input_path: str
+        The input path pointing to the GiBUU lookup data which should be used
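+
+    Examples
+    --------
+    A sketch (values are illustrative)::
+
+        >>> jc = generate_neutrino_jobcard_template("CC", "muon", 10.0, (8, 16))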
+    """
+    jc = Jobcard(input_path=input_path)
+    # NEUTRINO
+    jc["neutrino_induced"]["process_ID"] = PROCESS_LOOKUP[process.lower()]
+    jc["neutrino_induced"]["flavour_ID"] = FLAVOR_LOOKUP[flavour.lower()]
+    jc["neutrino_induced"]["nuXsectionMode"] = 6
+    jc["neutrino_induced"]["includeDIS"] = True
+    jc["neutrino_induced"]["includeDELTA"] = True
+    jc["neutrino_induced"]["includeRES"] = True
+    jc["neutrino_induced"]["includeQE"] = True
+    jc["neutrino_induced"]["include1pi"] = True
+    jc["neutrino_induced"]["include2p2hQE"] = True
+    jc["neutrino_induced"]["include2pi"] = False
+    jc["neutrino_induced"]["include2p2hDelta"] = False
+    jc["neutrino_inducted"]["printAbsorptionXS"] = True
+
+    # INPUT
+    jc["input"]["numTimeSteps"] = 0
+    jc["input"]["eventtype"] = 5
+    jc["input"]["numEnsembles"] = 100000
+    jc["input"]["delta_T"] = 0.2
+    jc["input"]["localEnsemble"] = True
+    jc["input"]["num_runs_SameEnergy"] = 1
+    # TARGET
+    jc["target"]["Z"] = target[0]
+    jc["target"]["A"] = target[1]
+    # MISC
+    jc["neutrinoAnalysis"]["outputEvents"] = write_events
+    return jc
diff --git a/km3buu/output.py b/km3buu/output.py
new file mode 100644
index 0000000000000000000000000000000000000000..70b1aaf02b1e671969b79e86044a5bacd6f5cbf5
--- /dev/null
+++ b/km3buu/output.py
@@ -0,0 +1,327 @@
+# Filename: output.py
+"""
+IO for km3buu
+
+"""
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import re
+import numpy as np
+from io import StringIO
+from os import listdir
+from os.path import isfile, join, abspath
+from tempfile import TemporaryDirectory
+import awkward
+import uproot
+from scipy.interpolate import UnivariateSpline
+from scipy.spatial.transform import Rotation
+
+from .jobcard import Jobcard, read_jobcard, PDGID_LOOKUP
+
+EVENT_FILENAME = "FinalEvents.dat"
+ROOT_PERT_FILENAME = "EventOutput.Pert.[0-9]{8}.root"
+ROOT_REAL_FILENAME = "EventOutput.Real.[0-9]{8}.root"
+FLUXDESCR_FILENAME = "neutrino_initialized_energyFlux.dat"
+XSECTION_FILENAMES = {"all": "neutrino_absorption_cross_section_ALL.dat"}
+
+PARTICLE_COLUMNS = ["E", "Px", "Py", "Pz", "barcode"]
+EVENTINFO_COLUMNS = [
+    "weight", "evType", "lepIn_E", "lepIn_Px", "lepIn_Py", "lepIn_Pz",
+    "lepOut_E", "lepOut_Px", "lepOut_Py", "lepOut_Pz", "nuc_E", "nuc_Px",
+    "nuc_Py", "nuc_Pz"
+]
+
+LHE_NU_INFO_DTYPE = np.dtype([
+    ("type", np.int),
+    ("weight", np.float64),
+    ("mom_lepton_in_E", np.float64),
+    ("mom_lepton_in_x", np.float64),
+    ("mom_lepton_in_y", np.float64),
+    ("mom_lepton_in_z", np.float64),
+    ("mom_lepton_out_E", np.float64),
+    ("mom_lepton_out_x", np.float64),
+    ("mom_lepton_out_y", np.float64),
+    ("mom_lepton_out_z", np.float64),
+    ("mom_nucleon_in_E", np.float64),
+    ("mom_nucleon_in_x", np.float64),
+    ("mom_nucleon_in_y", np.float64),
+    ("mom_nucleon_in_z", np.float64),
+])
+
+FLUX_INFORMATION_DTYPE = np.dtype([("energy", np.float64),
+                                   ("flux", np.float64),
+                                   ("events", np.float64)])
+
+EVENT_TYPE = {
+    1: "QE",
+    32: "pi neutron-background",
+    33: "pi proton-background",
+    34: "DIS",
+    35: "2p2h QE",
+    36: "2p2h Delta",
+    37: "2pi background",
+}
+
+
+def read_nu_abs_xsection(filepath):
+    """
+    Read the crosssections calculated by GiBUU
+
+    Parameters
+    ----------
+    filepath: str
+        Filepath to the GiBUU output file with neutrino absorption cross-section
+        (neutrino_absorption_cross_section_*.dat)
+
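+    Returns
+    -------
+    np.ndarray
+        Structured array with one field per column named in the file header
+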
+    """
+    with open(filepath, "r") as f:
+        lines = f.readlines()
+    header = re.sub(r"\d+:|#", "", lines[0]).split()
+    dt = np.dtype([(field, np.float64) for field in header])
+    values = np.genfromtxt(StringIO(lines[-1]), dtype=dt)
+    return values
+
+
+def parse_gibuu_event_info(line):
+    fields = line.split()[1:]
+    if int(fields[0]) != 5:
+        raise NotImplementedError(
+            "Event information type %s cannot be parsed yet!" % fields[0])
+    else:
+        return np.genfromtxt(StringIO(line[3:]), dtype=LHE_NU_INFO_DTYPE)
+
+
+class GiBUUOutput:
+    def __init__(self, data_dir):
+        """
+        Class for parsing GiBUU output files
+
+        Parameters
+        ----------
+        data_dir: str or TemporaryDirectory
+            Path to the directory containing the GiBUU output files
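+
+        Examples
+        --------
+        A sketch (the path is illustrative)::
+
+            >>> run = GiBUUOutput("output/examples")
+            >>> df = run.event_info_df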
+        """
+        if isinstance(data_dir, TemporaryDirectory):
+            self._tmp_dir = data_dir
+            self._data_path = abspath(data_dir.name)
+        else:
+            self._data_path = abspath(data_dir)
+        self.output_files = [
+            f for f in listdir(self._data_path)
+            if isfile(join(self._data_path, f))
+        ]
+        self._read_xsection_file()
+        self._read_root_output()
+        self._read_flux_file()
+        self._read_jobcard()
+
+    def _read_root_output(self):
+        root_pert_regex = re.compile(ROOT_PERT_FILENAME)
+        self.root_pert_files = list(
+            filter(root_pert_regex.match, self.output_files))
+
+        root_real_regex = re.compile(ROOT_REAL_FILENAME)
+        self.root_real_files = list(
+            filter(root_real_regex.match, self.output_files))
+
+    def _read_xsection_file(self):
+        if XSECTION_FILENAMES["all"] in self.output_files:
+            setattr(
+                self,
+                "xsection",
+                read_nu_abs_xsection(
+                    join(self._data_path, XSECTION_FILENAMES["all"])),
+            )
+
+    def _read_jobcard(self):
+        jobcard_regex = re.compile(r".*\.job")
+        jobcard_files = list(filter(jobcard_regex.match, self.output_files))
+        if len(jobcard_files) == 1:
+            self._jobcard_fname = jobcard_files[0]
+            self.jobcard = read_jobcard(
+                join(self._data_path, self._jobcard_fname))
+        else:
+            self.jobcard = None
+
+    def _read_flux_file(self):
+        fpath = join(self._data_path, FLUXDESCR_FILENAME)
+        self.flux_data = np.loadtxt(fpath, dtype=FLUX_INFORMATION_DTYPE)
+        self.flux_interpolation = UnivariateSpline(self.flux_data["energy"],
+                                                   self.flux_data["events"])
+
+    @property
+    def event_weights(self):
+        event_df = self.event_info_df
+        gibuu_wgt = event_df["weight"]
+        flux = self.flux_interpolation(event_df["lepIn_E"])
+        energy_min = np.min(self.flux_data["energy"])
+        energy_max = np.max(self.flux_data["energy"])
+        total_events = self.flux_interpolation.integral(energy_min, energy_max)
+        n_files = len(self.root_pert_files)
+        wgt = np.divide(total_events * gibuu_wgt, flux * n_files)
+        return wgt
+
+    @property
+    def particle_df(self):
+        import pandas as pd
+        df = None
+        for fname in self.root_pert_files:
+            fobj = uproot.open(join(self._data_path, fname))
+            file_df = None
+            for col in PARTICLE_COLUMNS:
+                tmp = awkward.topandas(fobj["RootTuple"][col].array(),
+                                       flatten=True)
+                tmp.name = col
+                if file_df is None:
+                    file_df = tmp
+                else:
+                    file_df = pd.concat([file_df, tmp], axis=1)
+            if df is None:
+                df = file_df
+            else:
+                new_indices = file_df.index.levels[0] + df.index.levels[0].max(
+                ) + 1
+                file_df.index = file_df.index.set_levels(new_indices, level=0)
+                df = df.append(file_df)
+            fobj.close()
+        return df
+
+    @property
+    def event_info_df(self):
+        import pandas as pd
+        df = None
+        for fname in self.root_pert_files:
+            fobj = uproot.open(join(self._data_path, fname))
+            event_data = fobj["RootTuple"]
+            dct = {k: event_data[k].array() for k in EVENTINFO_COLUMNS}
+            if df is None:
+                df = pd.DataFrame(dct)
+            else:
+                df = df.append(pd.DataFrame(dct), ignore_index=True)
+            df["By"] = 1 - df.lepOut_E / df.lepIn_E
+        return df
+
+
+def write_detector_file(gibuu_output,
+                        ofile="gibuu.aanet.root",
+                        can=(0, 476.5, 403.4),
+                        livetime=3.156e7):
+    """
+    Convert the GiBUU output to a KM3NeT MC (AANET) file
+
+    Parameters
+    ----------
+    gibuu_output: GiBUUOutput
+        Output object which wraps the information from the GiBUU output files
+    ofile: str
+        Output filename
+    can: tuple
+        The can dimensions which are used to distribute the events
+    livetime: float
+        The data livetime
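+
+    Examples
+    --------
+    A sketch (requires the KM3NeT ``aa``/ROOT bindings to be importable)::
+
+        >>> write_detector_file(GiBUUOutput("output/examples"),
+        ...                     ofile="gibuu_example.aanet.root")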
+    """
+    import aa, ROOT
+
+    aafile = ROOT.EventFile()
+    aafile.set_output(ofile)
+    mc_event_id = 0
+
+    is_cc = False
+
+    if gibuu_output.jobcard is None:
+        raise EnvironmentError("No jobcard provided within the GiBUU output!")
+
+    nu_type = PDGID_LOOKUP[gibuu_output.jobcard["neutrino_induced"]
+                           ["flavor_id"]]
+    sec_lep_type = nu_type
+    ichan = abs(gibuu_output.jobcard["neutrino_induced"]["process_id"])
+    if ichan == 2:
+        is_cc = True
+        sec_lep_type -= 1
+    if gibuu_output.jobcard["neutrino_induced"]["process_id"] < 0:
+        nu_type *= -1
+        sec_lep_type *= -1
+
+    for ifile in gibuu_output.root_pert_files:
+        # root_pert_files contains bare filenames; open them relative to the data dir
+        fobj = uproot.open(join(gibuu_output._data_path, ifile))
+        event_data = fobj["RootTuple"]
+        for event in event_data.lazyarrays():
+            aafile.evt.clear()
+            aafile.evt.id = mc_event_id
+            aafile.evt.mc_run_id = mc_event_id
+            mc_event_id += 1
+            # Vertex Position
+            r = can[2] * np.sqrt(np.random.uniform(0, 1))
+            phi = np.random.uniform(0, 2 * np.pi)
+            pos_x = r * np.cos(phi)
+            pos_y = r * np.sin(phi)
+            pos_z = np.random.uniform(can[0], can[1])
+            vtx_pos = np.array([pos_x, pos_y, pos_z])
+            # Direction
+            phi = np.random.uniform(0, 2 * np.pi)
+            cos_theta = np.random.uniform(-1, 1)
+            sin_theta = np.sqrt(1 - cos_theta**2)
+
+            dir_x = np.cos(phi) * sin_theta
+            dir_y = np.sin(phi) * sin_theta
+            dir_z = cos_theta
+
+            direction = np.array([dir_x, dir_y, dir_z])
+            rotation = np.array([dir_y, -dir_x, 0])
+            sin_rot = np.linalg.norm(rotation)
+            R = Rotation.from_rotvec(rotation * np.arcsin(sin_rot) / sin_rot)
+
+            timestamp = np.random.uniform(0, livetime)
+
+            nu_in_trk = ROOT.Trk()
+            nu_in_trk.id = 0
+            nu_in_trk.mother_id = -1
+            nu_in_trk.type = nu_type
+            nu_in_trk.pos.set(*vtx_pos)
+            nu_in_trk.dir.set(*direction)
+            nu_in_trk.E = event.lepIn_E
+            nu_in_trk.t = timestamp
+
+            lep_out_trk = ROOT.Trk()
+            lep_out_trk.id = 1
+            lep_out_trk.mother_id = 0
+            lep_out_trk.type = sec_lep_type
+            lep_out_trk.pos.set(*vtx_pos)
+            mom = np.array([event.lepOut_Px, event.lepOut_Py, event.lepOut_Pz])
+            p_dir = R.apply(mom / np.linalg.norm(mom))
+            lep_out_trk.dir.set(*p_dir)
+            lep_out_trk.E = event.lepOut_E
+            lep_out_trk.t = timestamp
+
+            bjorken_y = 1.0 - float(event.lepOut_E / event.lepIn_E)
+            nu_in_trk.setusr('bx', -1)
+            nu_in_trk.setusr('by', bjorken_y)
+            nu_in_trk.setusr('ichan', ichan)
+            nu_in_trk.setusr("cc", is_cc)
+
+            aafile.evt.mc_trks.push_back(nu_in_trk)
+            aafile.evt.mc_trks.push_back(lep_out_trk)
+
+            for i in range(len(event.E)):
+                trk = ROOT.Trk()
+                trk.id = i + 2
+                mom = np.array([event.Px[i], event.Py[i], event.Pz[i]])
+                p_dir = R.apply(mom / np.linalg.norm(mom))
+                trk.pos.set(*vtx_pos)
+                trk.dir.set(*p_dir)
+                trk.mother_id = 0
+                trk.type = int(event.barcode[i])
+                trk.E = event.E[i]
+                trk.t = timestamp
+                aafile.evt.mc_trks.push_back(trk)
+            aafile.write()
+            # if mc_event_id > 100:
+            #     break
+
+    del aafile
diff --git a/km3buu/tests/__init__.py b/km3buu/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/km3buu/tests/test_ctrl.py b/km3buu/tests/test_ctrl.py
new file mode 100644
index 0000000000000000000000000000000000000000..04fb14d3f9a8f2652d44fa054d6b202dfe85a2c9
--- /dev/null
+++ b/km3buu/tests/test_ctrl.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Filename: test_ctrl.py
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import unittest
+import numpy as np
+from km3buu.jobcard import *
+from km3buu.ctrl import run_jobcard
+from tempfile import TemporaryDirectory
+from os import listdir
+from os.path import abspath, join, dirname
+from thepipe.logger import get_logger
+
+JOBCARD_FOLDER = abspath(join(dirname(__file__), "../../jobcards"))
+
+# class TestCTRLmisc(unittest.TestCase):
+#     def test_invalid_jobcard(self):
+
+
+class TestCTRLbyJobcardFile(unittest.TestCase):
+    def setUp(self):
+        self.filename = join(JOBCARD_FOLDER, "examples/example.job")
+        self.output_dir = TemporaryDirectory()
+        self.retval = run_jobcard(self.filename, self.output_dir.name)
+        log = get_logger("ctrl.py")
+        log.setLevel("INFO")
+
+    def test_output(self):
+        assert self.retval == 0
+
+    def test_output_files_existing(self):
+        files = listdir(self.output_dir.name)
+        assert "FinalEvents.dat" in files
+
+
+class TestCTRLbyJobcardObject(unittest.TestCase):
+    def setUp(self):
+        log = get_logger("ctrl.py")
+        log.setLevel("INFO")
+        self.test_jobcard = Jobcard()
+        # NEUTRINO
+        self.test_jobcard["neutrino_induced"]["process_ID"] = PROCESS_LOOKUP[
+            "cc"]
+        self.test_jobcard["neutrino_induced"]["flavor_ID"] = FLAVOR_LOOKUP[
+            "electron"]
+        self.test_jobcard["neutrino_induced"][
+            "nuXsectionMode"] = XSECTIONMODE_LOOKUP["dSigmaMC"]
+        self.test_jobcard["neutrino_induced"]["includeDIS"] = True
+        self.test_jobcard["neutrino_induced"]["printAbsorptionXS"] = True
+        self.test_jobcard["nl_SigmaMC"]["enu"] = 1
+        # INPUT
+        self.test_jobcard["input"]["numTimeSteps"] = 0
+        self.test_jobcard["input"]["eventtype"] = 5
+        self.test_jobcard["input"]["numEnsembles"] = 1
+        self.test_jobcard["input"]["delta_T"] = 0.2
+        self.test_jobcard["input"]["localEnsemble"] = True
+        self.test_jobcard["input"]["num_runs_SameEnergy"] = 1
+        self.test_jobcard["input"]["LRF_equals_CALC_frame"] = True
+        # TARGET
+        self.test_jobcard["target"]["target_Z"] = 1
+        self.test_jobcard["target"]["target_A"] = 1
+        # MISC
+        # self.test_jobcard["nl_neutrinoxsection"]["DISmassless"] =  True
+        self.test_jobcard["neutrinoAnalysis"]["outputEvents"] = True
+        self.test_jobcard["pythia"]["PARP(91)"] = 0.44
+        self.output_dir = TemporaryDirectory()
+        self.retval = run_jobcard(self.test_jobcard, self.output_dir.name)
+        # raise Exception(self.test_jobcard)
+
+    def test_output(self):
+        assert self.retval == 0
+
+    def test_output_files_existing(self):
+        files = listdir(self.output_dir.name)
+        assert "FinalEvents.dat" in files
diff --git a/km3buu/tests/test_environment.py b/km3buu/tests/test_environment.py
new file mode 100644
index 0000000000000000000000000000000000000000..247690b1e66adda9d5c3c93be01c24a19f5738d8
--- /dev/null
+++ b/km3buu/tests/test_environment.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Filename: test_environment.py
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import unittest
+from unittest.mock import patch
+from km3buu.environment import *
+from os.path import dirname, join
+from spython.main import Client
+from km3buu import DOCKER_URL, IMAGE_NAME
+
+
+class TestBuild(unittest.TestCase):
+    def test_wrong_dir_path(self):
+        wrong_path = "foobar"
+        with self.assertRaises(OSError) as ctx:
+            build_image(wrong_path)
+        assert str(ctx.exception) == "Directory not found!"
+
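+    # Client.build is mocked here, so no Singularity image is actually built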
+    @patch.object(Client, 'build', return_value=123)
+    def test_build_cmd(self, function):
+        existing_path = dirname(__file__)
+        assert build_image(existing_path) == 123
+        expected_image_path = join(existing_path, IMAGE_NAME)
+        function.assert_called_once_with(DOCKER_URL,
+                                         image=expected_image_path,
+                                         sudo=False,
+                                         ext="simg")
diff --git a/km3buu/tests/test_jobcard.py b/km3buu/tests/test_jobcard.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5c0f339f5080f81ae278fafc0e8adaf029b820e
--- /dev/null
+++ b/km3buu/tests/test_jobcard.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Filename: test_jobcard.py
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import unittest
+import numpy as np
+from km3buu.jobcard import Jobcard, INPUT_PATH
+
+
+class TestJobcard(unittest.TestCase):
+    def setUp(self):
+        self.test_jobcard = Jobcard()
+        # Insert some test elements
+        self.test_jobcard["ABC"]["def"] = 42
+
+    def test_input_path(self):
+        expected_line = "path_to_input = '%s'" % INPUT_PATH
+        ctnt = str(self.test_jobcard)
+        group_start = ctnt.find("&input")
+        group_end = ctnt.find("/\n", group_start)
+        assert ctnt[group_start:group_end].find(expected_line) != -1
+
+    def test_elements(self):
+        ctnt = str(self.test_jobcard)
+        expected_line = "def = 42"
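+        # group names are lower-cased in the rendered namelist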
+        group_start = ctnt.find("&abc")
+        group_end = ctnt.find("/", group_start)
+        assert ctnt[group_start:group_end].find(expected_line) != -1
+
+    def test_remove_elements(self):
+        del self.test_jobcard["ABC"]["def"]
+        ctnt = str(self.test_jobcard)
+        expected_line = "def = 42"
+        assert ctnt.find("&ABC") == -1
+        assert ctnt.find(expected_line) == -1
diff --git a/km3buu/tests/test_output.py b/km3buu/tests/test_output.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d28c5a64f44d0dfc607c4459f399658f1271041
--- /dev/null
+++ b/km3buu/tests/test_output.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Filename: test_output.py
+
+__author__ = "Johannes Schumann"
+__copyright__ = "Copyright 2020, Johannes Schumann and the KM3NeT collaboration."
+__credits__ = []
+__license__ = "MIT"
+__maintainer__ = "Johannes Schumann"
+__email__ = "jschumann@km3net.de"
+__status__ = "Development"
+
+import unittest
+import numpy as np
+from km3buu.output import *
+from os import listdir
+from os.path import abspath, join, dirname
+from km3net_testdata import data_path
+
+TESTDATA_DIR = data_path("gibuu")
+
+
+class TestXSection(unittest.TestCase):
+    def test_xsection_all(self):
+        filename = join(TESTDATA_DIR, XSECTION_FILENAMES["all"])
+        xsection = read_nu_abs_xsection(filename)
+        self.assertAlmostEqual(xsection['var'], 58.631)
+        self.assertAlmostEqual(xsection['sum'], 8.0929)
+        self.assertAlmostEqual(xsection['Delta'], 0.26805)
+        self.assertAlmostEqual(xsection['highRES'], 0.14248)
+
+
+class TestGiBUUOutput(unittest.TestCase):
+    def setUp(self):
+        self.output = GiBUUOutput(TESTDATA_DIR)
+
+    def test_attr(self):
+        assert hasattr(self.output, "event_info_df")
+        assert hasattr(self.output, "particle_df")
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000000000000000000000000000000000000..24b78be3d57f88d2bae4c6cb9cdb0fc3ef199bb9
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,3 @@
+[pytest]
+docstyle_convention = numpy
+junit_family = xunit1
diff --git a/requirements-dev.txt b/requirements-dev.txt
new file mode 100644
index 0000000000000000000000000000000000000000..dfc2d38c6f1edb57721ecfd8cacde943d43d8f07
--- /dev/null
+++ b/requirements-dev.txt
@@ -0,0 +1,16 @@
+numpydoc
+pydocstyle
+pytest
+pytest-cov
+pytest-flake8
+pytest-pylint
+pytest-watch
+sphinx-rtd-theme
+sphinx
+sphinxcontrib-napoleon
+sphinxcontrib-programoutput
+sphinxcontrib-websupport
+sphinx-autoapi
+setuptools_scm
+yapf>=0.25
+km3net-testdata>=0.2.11
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ccda8236773cd7d5f8061eb51d61c1aa22ca95e0
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,11 @@
+setuptools_scm
+numpy
+scipy
+spython
+thepipe
+particle
+click
+f90nml
+uproot
+awkward
+pandas
diff --git a/scripts/io.jl b/scripts/io.jl
deleted file mode 100644
index dc068021a2a15969caed75f6f0b89660f04fcfbc..0000000000000000000000000000000000000000
--- a/scripts/io.jl
+++ /dev/null
@@ -1,40 +0,0 @@
-using CSV
-
-"""
-    read_final_events(filepath::AbstractString)
-
-Function for reading the final events from the GiBUU output  
-
-# Arguments
-- `filepath::AbstractString`: filepath to the FinalEvents.dat
-"""
-function read_final_events(filepath::AbstractString)
-    file = open(filepath)
-    header = readline(file)
-    close(file)
-    raw_col_names = split(header)[2:end]
-    col_names = [String.(split(col,":"))[end] for col in raw_col_names]
-    CSV.read(filepath, 
-             header=col_names, 
-             delim=' ', 
-             comment="#", 
-             ignorerepeated=true, 
-             types=[Int32, 
-                    Int32, 
-                    Int32, 
-                    Int32, 
-                    Float64,
-                    Float64,
-                    Float64,
-                    Float64,
-                    Float64,
-                    Float64,
-                    Float64,
-                    Float64,
-                    Int32, 
-                    Int32, 
-                    Float64
-                    ])
-end
-
-
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7c6f7346c1fc9555b83fbf9a46ebe3d6f4c19e4
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Filename: setup.py
+"""
+KM3BUU setup script.
+
+"""
+from setuptools import setup, find_packages
+
+PACKAGE_NAME = 'km3buu'
+URL = 'https://git.km3net.de/simulation/km3buu'
+DESCRIPTION = 'GiBUU tools for KM3NeT'
+__author__ = 'Johannes Schumann'
+__email__ = 'jschumann@km3net.de'
+
+with open('requirements.txt') as fobj:
+    REQUIREMENTS = [l.strip() for l in fobj.readlines()]
+
+with open('requirements-dev.txt') as fobj:
+    DEV_REQUIREMENTS = [l.strip() for l in fobj.readlines()]
+
+setup(
+    name=PACKAGE_NAME,
+    url=URL,
+    description=DESCRIPTION,
+    author=__author__,
+    author_email=__email__,
+    packages=find_packages(),
+    include_package_data=True,
+    platforms='any',
+    setup_requires=['setuptools_scm'],
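+    # the package version is derived from the git tag via setuptools_scm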
+    use_scm_version={
+        'write_to': '{}/version.py'.format(PACKAGE_NAME),
+        'tag_regex': r'^(?P<prefix>v)?(?P<version>[^\+]+)(?P<suffix>.*)?$',
+    },
+    install_requires=REQUIREMENTS,
+    extras_require={'dev': DEV_REQUIREMENTS},
+    python_requires='>=3.0',
+    entry_points={'console_scripts': ['km3buu=km3buu.cmd:main']},
+    classifiers=[
+        'Development Status :: 3 - Alpha',
+        'Intended Audience :: Developers',
+        'Intended Audience :: Science/Research',
+        'Programming Language :: Python',
+    ],
+)