diff --git a/.travis.yml b/.travis.yml
index d38637977..c0282b855 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,7 +6,6 @@ branches:
   - master
   - stable
 sudo: required
-dist: trusty
 language: cpp
 
 git:
@@ -39,46 +38,48 @@ jobs:
     ###
     # Stage: Build Carl
     ###
-
     # ubuntu-19.04 - DefaultDebugTravis
     - stage: Build Carl
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebugTravis LINUX=ubuntu-19.04 COMPILER=gcc
-      install:
-        - travis/install_linux.sh
+      env: CONFIG=DefaultDebugTravis TASK=TestDocker LINUX=ubuntu-19.04 COMPILER=gcc
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
         - travis/build_carl.sh
-      after_success:
-        - travis/deploy_carl.sh
+      before_cache:
+        - docker cp carl:/opt/carl/. .
+      deploy:
+        - provider: script
+          skip_cleanup: true
+          script: bash travis/deploy_docker.sh carl
     # ubuntu-19.04 - DefaultReleaseTravis
     - stage: Build Carl
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
-      install:
-        - travis/install_linux.sh
+      env: CONFIG=DefaultReleaseTravis TASK=TestDockerDoxygen LINUX=ubuntu-19.04 COMPILER=gcc
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
         - travis/build_carl.sh
-      after_success:
-        - travis/deploy_carl.sh
+      before_cache:
+        - docker cp carl:/opt/carl/. .
+      deploy:
+        - provider: script
+          skip_cleanup: true
+          script: bash travis/deploy_docker.sh carl
 
     ###
     # Stage: Build (1st run)
     ###
-
     # ubuntu-18.04 - DefaultDebug
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -91,10 +92,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -107,10 +108,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -123,10 +124,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -139,10 +140,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -155,10 +156,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -171,10 +172,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebugTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultDebugTravis TASK=TestDocker LINUX=ubuntu-19.04 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -187,10 +188,10 @@ jobs:
     - stage: Build (1st run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultReleaseTravis TASK=TestDockerDoxygen LINUX=ubuntu-19.04 COMPILER=gcc
       install:
         - rm -rf build
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -203,14 +204,13 @@ jobs:
     ###
     # Stage: Build (2nd run)
     ###
-
     # ubuntu-18.04 - DefaultDebug
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -223,9 +223,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -238,9 +238,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -253,9 +253,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -268,9 +268,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -283,9 +283,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -298,9 +298,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebugTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultDebugTravis TASK=TestDocker LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -313,9 +313,9 @@ jobs:
     - stage: Build (2nd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultReleaseTravis TASK=TestDockerDoxygen LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -328,14 +328,13 @@ jobs:
     ###
     # Stage: Build (3rd run)
     ###
-
     # ubuntu-18.04 - DefaultDebug
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -348,9 +347,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -363,9 +362,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -378,9 +377,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -393,9 +392,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -408,9 +407,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -423,9 +422,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebugTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultDebugTravis TASK=TestDocker LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -438,9 +437,9 @@ jobs:
     - stage: Build (3rd run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultReleaseTravis TASK=TestDockerDoxygen LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -453,14 +452,13 @@ jobs:
     ###
     # Stage: Build (4th run)
     ###
-
     # ubuntu-18.04 - DefaultDebug
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -473,9 +471,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -488,9 +486,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -503,9 +501,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -518,9 +516,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -533,9 +531,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -548,9 +546,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebugTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultDebugTravis TASK=TestDocker LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -563,9 +561,9 @@ jobs:
     - stage: Build (4th run)
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultReleaseTravis TASK=TestDockerDoxygen LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
@@ -576,147 +574,142 @@ jobs:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
 
     ###
-    # Stage: Test all
+    # Stage: Tasks
     ###
-
     # ubuntu-18.04 - DefaultDebug
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
     # ubuntu-18.04 - DefaultRelease
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.04 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
     # debian-9 - DefaultDebug
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
     # debian-9 - DefaultRelease
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=debian-9 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=debian-9 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
     # ubuntu-18.10 - DefaultDebug
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebug LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultDebug TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
     # ubuntu-18.10 - DefaultRelease
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultRelease LINUX=ubuntu-18.10 COMPILER=gcc
+      env: CONFIG=DefaultRelease TASK=Test LINUX=ubuntu-18.10 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
     # ubuntu-19.04 - DefaultDebugTravis
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultDebugTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultDebugTravis TASK=TestDocker LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
-      after_success:
-        - travis/deploy_storm.sh
+      deploy:
+        - provider: script
+          skip_cleanup: true
+          script: bash travis/deploy_docker.sh storm
     # ubuntu-19.04 - DefaultReleaseTravis
-    - stage: Test all
+    - stage: Tasks
       os: linux
       compiler: gcc
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      env: CONFIG=DefaultReleaseTravis TASK=TestDockerDoxygen LINUX=ubuntu-19.04 COMPILER=gcc
       install:
-        - travis/install_linux.sh
+        - travis/skip_test.sh
       before_script:
         - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode
       script:
-        - travis/build.sh TestAll
+        - travis/build.sh Tasks
       before_cache:
         - docker cp storm:/opt/storm/. .
       after_failure:
         - find build -iname '*err*.log' -type f -print -exec cat {} \;
-      after_success:
-        - travis/deploy_storm.sh
-  allow_failures:
-    - stage: Build (1st run)
-      os: linux
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
-    - stage: Build (2nd run)
-      os: linux
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
-    - stage: Build (3rd run)
-      os: linux
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
-    - stage: Build (4th run)
-      os: linux
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
-    - stage: Test all
-      os: linux
-      env: CONFIG=DefaultReleaseTravis LINUX=ubuntu-19.04 COMPILER=gcc
+      deploy:
+        - provider: script
+          skip_cleanup: true
+          script: bash travis/deploy_docker.sh storm
+        - provider: pages
+          skip_cleanup: true
+          github_token: $GITHUB_TOKEN
+          local_dir: build/doc/html/
+          repo: moves-rwth/storm-doc
+          target_branch: master
+          on:
+            branch: master
 
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4ff9ea3a6..b2fe1b814 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -386,22 +386,8 @@ endif()
 # in the the system does not have a library
 include(resources/3rdparty/CMakeLists.txt)
 
-#############################################################
-##
-##	Doxygen
-##
-#############################################################
-
-find_package(Doxygen)
-# Add a target to generate API documentation with Doxygen
-if(DOXYGEN_FOUND)
-    set(CMAKE_DOXYGEN_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/doc")
-    string(REGEX REPLACE ";" " " CMAKE_DOXYGEN_INPUT_LIST "${PROJECT_SOURCE_DIR}/src")
-
-    configure_file("${CMAKE_CURRENT_SOURCE_DIR}/resources/doxygen/Doxyfile.in" "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" @ONLY)
-
-    add_custom_target(doc ${DOXYGEN_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" COMMENT "Generating API documentation with Doxygen" VERBATIM)
-endif(DOXYGEN_FOUND)
+# Include Doxygen
+include(resources/doxygen/CMakeLists.txt)
 
 #############################################################
 ##
diff --git a/resources/doxygen/CMakeLists.txt b/resources/doxygen/CMakeLists.txt
new file mode 100644
index 000000000..6105db27f
--- /dev/null
+++ b/resources/doxygen/CMakeLists.txt
@@ -0,0 +1,36 @@
+# Enable operator IN_LIST to avoid problems with CMake version 3.12
+if(POLICY CMP0057)
+    cmake_policy(SET CMP0057 NEW)
+endif()
+
+find_package(Doxygen)
+# Add a target to generate API documentation with Doxygen
+if(DOXYGEN_FOUND)
+
+    # We use the doxygen command of CMake instead of using the separate config file
+    set(DOXYGEN_PROJECT_NAME "Storm")
+    set(DOXYGEN_PROJECT_BRIEF "A Modern Probabilistic Model Checker")
+    set(DOXYGEN_BRIEF_MEMBER_DESC YES)
+    set(DOXYGEN_REPEAT_BRIEF YES)
+    set(DOXYGEN_JAVADOC_AUTOBRIEF YES)
+    set(DOXYGEN_QT_AUTOBRIEF YES)
+    set(DOXYGEN_EXTRACT_ALL YES)
+    set(DOXYGEN_EXTRACT_STATIC YES)
+    set(DOXYGEN_SOURCE_BROWSER YES)
+    set(DOXYGEN_GENERATE_TREEVIEW YES)
+    set(DOXYGEN_CASE_SENSE_NAMES NO)
+    set(DOXYGEN_HTML_TIMESTAMP YES)
+    set(DOXYGEN_CREATE_SUBDIRS YES)
+    set(DOXYGEN_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/doc")
+    doxygen_add_docs(
+        doc
+        "${PROJECT_SOURCE_DIR}/src"
+        COMMENT "Generating API documentation with Doxygen"
+    )
+
+    # These commands can be used if the separate config files should be used
+    #set(CMAKE_DOXYGEN_OUTPUT_DIR "${CMAKE_CURRENT_BINARY_DIR}/doc")
+    #string(REGEX REPLACE ";" " " CMAKE_DOXYGEN_INPUT_LIST "${PROJECT_SOURCE_DIR}/src")
+    #configure_file("${CMAKE_CURRENT_SOURCE_DIR}/resources/doxygen/Doxyfile.in.new" "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" @ONLY)
+    #add_custom_target(doc ${DOXYGEN_EXECUTABLE} "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/Doxyfile" COMMENT "Generating API documentation with Doxygen" VERBATIM)
+endif(DOXYGEN_FOUND)
diff --git a/resources/doxygen/Doxyfile.in b/resources/doxygen/Doxyfile.in
index a328991e3..c8b93cd17 100644
--- a/resources/doxygen/Doxyfile.in
+++ b/resources/doxygen/Doxyfile.in
@@ -1,104 +1,132 @@
-# Doxyfile 1.8.1.1
+# Doxyfile 1.8.15
 
 # This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project
+# doxygen (www.doxygen.org) for a project.
 #
-# All text after a hash (#) is considered a comment and will be ignored
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
 # The format is:
-#       TAG = value [value, ...]
-# For lists items can also be appended using:
-#       TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (" ")
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
 
 #---------------------------------------------------------------------------
 # Project related configuration options
 #---------------------------------------------------------------------------
 
-# This tag specifies the encoding used for all characters in the config file 
-# that follow. The default is UTF-8 which is also the encoding used for all 
-# text before the first occurrence of this tag. Doxygen uses libiconv (or the 
-# iconv built into libc) for the transcoding. See 
-# http://www.gnu.org/software/libiconv for the list of possible encodings.
+# This tag specifies the encoding used for all characters in the configuration
+# file that follow. The default is UTF-8 which is also the encoding used for all
+# text before the first occurrence of this tag. Doxygen uses libiconv (or the
+# iconv built into libc) for the transcoding. See
+# https://www.gnu.org/software/libiconv/ for the list of possible encodings.
+# The default value is: UTF-8.
 
 DOXYFILE_ENCODING      = UTF-8
 
-# The PROJECT_NAME tag is a single word (or sequence of words) that should 
-# identify the project. Note that if you do not use Doxywizard you need 
-# to put quotes around the project name if it contains spaces.
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
+# double-quotes, unless you are using Doxywizard) that should identify the
+# project for which the documentation is generated. This name is used in the
+# title of most generated pages and in a few other places.
+# The default value is: My Project.
 
-PROJECT_NAME           = "STORM"
+PROJECT_NAME           = "Storm"
 
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. 
-# This could be handy for archiving the generated documentation or 
-# if some version control system is used.
+# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
+# could be handy for archiving the generated documentation or if some version
+# control system is used.
 
-PROJECT_NUMBER         = 
+# TODO: set from storm-version
+PROJECT_NUMBER         = "1.3.0"
 
-# Using the PROJECT_BRIEF tag one can provide an optional one line description 
-# for a project that appears at the top of each page and should give viewer 
-# a quick idea about the purpose of the project. Keep the description short.
+# Using the PROJECT_BRIEF tag one can provide an optional one line description
+# for a project that appears at the top of each page and should give viewer a
+# quick idea about the purpose of the project. Keep the description short.
 
-PROJECT_BRIEF          = "A complete rewrite of MRMC in C++"
+PROJECT_BRIEF          = "A Modern Probabilistic Model Checker"
 
-# With the PROJECT_LOGO tag one can specify an logo or icon that is 
-# included in the documentation. The maximum height of the logo should not 
-# exceed 55 pixels and the maximum width should not exceed 200 pixels. 
-# Doxygen will copy the logo to the output directory.
+# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
+# in the documentation. The maximum height of the logo should not exceed 55
+# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
+# the logo to the output directory.
 
-PROJECT_LOGO           = 
+# TODO: set logo?
+PROJECT_LOGO           =
 
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) 
-# base path where the generated documentation will be put. 
-# If a relative path is entered, it will be relative to the location 
-# where doxygen was started. If left blank the current directory will be used.
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
+# into which the generated documentation will be written. If a relative path is
+# entered, it will be relative to the location where doxygen was started. If
+# left blank the current directory will be used.
 
 OUTPUT_DIRECTORY       = "@CMAKE_DOXYGEN_OUTPUT_DIR@"
 
-# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 
-# 4096 sub-directories (in 2 levels) under the output directory of each output 
-# format and will distribute the generated files over these directories. 
-# Enabling this option can be useful when feeding doxygen a huge amount of 
-# source files, where putting all generated files in the same directory would 
-# otherwise cause performance problems for the file system.
-
-CREATE_SUBDIRS         = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all 
-# documentation generated by doxygen is written. Doxygen will use this 
-# information to generate all constant output in the proper language. 
-# The default language is English, other supported languages are: 
-# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, 
-# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, 
-# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English 
-# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, 
-# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, 
-# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
+# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
+# directories (in 2 levels) under the output directory of each output format and
+# will distribute the generated files over these directories. Enabling this
+# option can be useful when feeding doxygen a huge amount of source files, where
+# putting all generated files in the same directory would otherwise cause
+# performance problems for the file system.
+# The default value is: NO.
+
+CREATE_SUBDIRS         = YES
+
+# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
+# characters to appear in the names of generated files. If set to NO, non-ASCII
+# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
+# U+3044.
+# The default value is: NO.
+
+ALLOW_UNICODE_NAMES    = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
+# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
+# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
+# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
+# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
+# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
+# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
+# Ukrainian and Vietnamese.
+# The default value is: English.
 
 OUTPUT_LANGUAGE        = English
 
-# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will 
-# include brief member descriptions after the members that are listed in 
-# the file and class documentation (similar to JavaDoc). 
-# Set to NO to disable this.
+# The OUTPUT_TEXT_DIRECTION tag is used to specify the direction in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all generated output in the proper direction.
+# Possible values are: None, LTR, RTL and Context.
+# The default value is: None.
+
+OUTPUT_TEXT_DIRECTION  = None
+
+# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
+# descriptions after the members that are listed in the file and class
+# documentation (similar to Javadoc). Set to NO to disable this.
+# The default value is: YES.
 
 BRIEF_MEMBER_DESC      = YES
 
-# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend 
-# the brief description of a member or function before the detailed description. 
-# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the 
+# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
+# description of a member or function before the detailed description
+#
+# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
 # brief descriptions will be completely suppressed.
+# The default value is: YES.
 
 REPEAT_BRIEF           = YES
 
-# This tag implements a quasi-intelligent brief description abbreviator 
-# that is used to form the text in various listings. Each string 
-# in this list, if found as the leading text of the brief description, will be 
-# stripped from the text and the result after processing the whole list, is 
-# used as the annotated text. Otherwise, the brief description is used as-is. 
-# If left blank, the following values are used ("$name" is automatically 
-# replaced with the name of the entity): "The $name class" "The $name widget" 
-# "The $name file" "is" "provides" "specifies" "contains" 
-# "represents" "a" "an" "the"
+# This tag implements a quasi-intelligent brief description abbreviator that is
+# used to form the text in various listings. Each string in this list, if found
+# as the leading text of the brief description, will be stripped from the text
+# and the result, after processing the whole list, is used as the annotated
+# text. Otherwise, the brief description is used as-is. If left blank, the
+# following values are used ($name is automatically replaced with the name of
+# the entity):The $name class, The $name widget, The $name file, is, provides,
+# specifies, contains, represents, a, an and the.
 
 ABBREVIATE_BRIEF       = "The $name class" \
                          "The $name widget" \
@@ -112,247 +140,310 @@ ABBREVIATE_BRIEF       = "The $name class" \
                          an \
                          the
 
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then 
-# Doxygen will generate a detailed section even if there is only a brief 
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# doxygen will generate a detailed section even if there is only a brief
 # description.
+# The default value is: NO.
 
 ALWAYS_DETAILED_SEC    = NO
 
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all 
-# inherited members of a class in the documentation of that class as if those 
-# members were ordinary class members. Constructors, destructors and assignment 
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
 # operators of the base classes will not be shown.
+# The default value is: NO.
 
 INLINE_INHERITED_MEMB  = NO
 
-# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full 
-# path before files name in the file list and in the header files. If set 
-# to NO the shortest path that makes the file name unique will be used.
+# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
+# before files name in the file list and in the header files. If set to NO the
+# shortest path that makes the file name unique will be used
+# The default value is: YES.
 
 FULL_PATH_NAMES        = YES
 
-# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag 
-# can be used to strip a user-defined part of the path. Stripping is 
-# only done if one of the specified strings matches the left-hand part of 
-# the path. The tag can be used to show relative paths in the file list. 
-# If left blank the directory from which doxygen is run is used as the 
-# path to strip.
+# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
+# Stripping is only done if one of the specified strings matches the left-hand
+# part of the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the path to
+# strip.
+#
+# Note that you can specify absolute paths here, but also relative paths, which
+# will be relative from the directory where doxygen is started.
+# This tag requires that the tag FULL_PATH_NAMES is set to YES.
 
-STRIP_FROM_PATH        = 
+STRIP_FROM_PATH        =
 
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of 
-# the path mentioned in the documentation of a class, which tells 
-# the reader which header file to include in order to use a class. 
-# If left blank only the name of the header file containing the class 
-# definition is used. Otherwise one should specify the include paths that 
-# are normally passed to the compiler using the -I flag.
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
+# path mentioned in the documentation of a class, which tells the reader which
+# header file to include in order to use a class. If left blank only the name of
+# the header file containing the class definition is used. Otherwise one should
+# specify the list of include paths that are normally passed to the compiler
+# using the -I flag.
 
-STRIP_FROM_INC_PATH    = 
+STRIP_FROM_INC_PATH    =
 
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter 
-# (but less readable) file names. This can be useful if your file system 
-# doesn't support long names like on DOS, Mac, or CD-ROM.
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
+# less readable) file names. This can be useful if your file system doesn't
+# support long names like on DOS, Mac, or CD-ROM.
+# The default value is: NO.
 
 SHORT_NAMES            = NO
 
-# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen 
-# will interpret the first line (until the first dot) of a JavaDoc-style 
-# comment as the brief description. If set to NO, the JavaDoc 
-# comments will behave just like regular Qt-style comments 
-# (thus requiring an explicit @brief command for a brief description.)
+# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
+# first line (until the first dot) of a Javadoc-style comment as the brief
+# description. If set to NO, the Javadoc-style will behave just like regular Qt-
+# style comments (thus requiring an explicit @brief command for a brief
+# description.)
+# The default value is: NO.
 
-JAVADOC_AUTOBRIEF      = NO
+JAVADOC_AUTOBRIEF      = YES
 
-# If the QT_AUTOBRIEF tag is set to YES then Doxygen will 
-# interpret the first line (until the first dot) of a Qt-style 
-# comment as the brief description. If set to NO, the comments 
-# will behave just like regular Qt-style comments (thus requiring 
-# an explicit \brief command for a brief description.)
+# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
+# line (until the first dot) of a Qt-style comment as the brief description. If
+# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
+# requiring an explicit \brief command for a brief description.)
+# The default value is: NO.
 
-QT_AUTOBRIEF           = NO
+QT_AUTOBRIEF           = YES
 
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen 
-# treat a multi-line C++ special comment block (i.e. a block of //! or /// 
-# comments) as a brief description. This used to be the default behaviour. 
-# The new default is to treat a multi-line C++ comment block as a detailed 
-# description. Set this tag to YES if you prefer the old behaviour instead.
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
+# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
+# a brief description. This used to be the default behavior. The new default is
+# to treat a multi-line C++ comment block as a detailed description. Set this
+# tag to YES if you prefer the old behavior instead.
+#
+# Note that setting this tag to YES also means that rational rose comments are
+# not recognized any more.
+# The default value is: NO.
 
 MULTILINE_CPP_IS_BRIEF = NO
 
-# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented 
-# member inherits the documentation from any documented member that it 
-# re-implements.
+# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
+# documentation from any documented member that it re-implements.
+# The default value is: YES.
 
 INHERIT_DOCS           = YES
 
-# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce 
-# a new page for each member. If set to NO, the documentation of a member will 
-# be part of the file/class/namespace that contains it.
+# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
+# page for each member. If set to NO, the documentation of a member will be part
+# of the file/class/namespace that contains it.
+# The default value is: NO.
 
 SEPARATE_MEMBER_PAGES  = NO
 
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. 
-# Doxygen uses this value to replace tabs by spaces in code fragments.
+# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
+# uses this value to replace tabs by spaces in code fragments.
+# Minimum value: 1, maximum value: 16, default value: 4.
+
+TAB_SIZE               = 4
+
+# This tag can be used to specify a number of aliases that act as commands in
+# the documentation. An alias has the form:
+# name=value
+# For example adding
+# "sideeffect=@par Side Effects:\n"
+# will allow you to put the command \sideeffect (or @sideeffect) in the
+# documentation, which will result in a user-defined paragraph with heading
+# "Side Effects:". You can put \n's in the value part of an alias to insert
+# newlines (in the resulting output). You can put ^^ in the value part of an
+# alias to insert a newline as if a physical newline was in the original file.
+# When you need a literal { or } or , in the value part of an alias you have to
+# escape them by means of a backslash (\), this can lead to conflicts with the
+# commands \{ and \} for these it is advised to use the version @{ and @} or use
+# a double escape (\\{ and \\})
+
+ALIASES                =
+
+# This tag can be used to specify a number of word-keyword mappings (TCL only).
+# A mapping has the form "name=value". For example adding "class=itcl::class"
+# will allow you to use the command class in the itcl::class meaning.
+
+TCL_SUBST              =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
+# only. Doxygen will then generate output that is more tailored for C. For
+# instance, some of the names that are used will be different. The list of all
+# members will be omitted, etc.
+# The default value is: NO.
 
-TAB_SIZE               = 8
+OPTIMIZE_OUTPUT_FOR_C  = NO
 
-# This tag can be used to specify a number of aliases that acts 
-# as commands in the documentation. An alias has the form "name=value". 
-# For example adding "sideeffect=\par Side Effects:\n" will allow you to 
-# put the command \sideeffect (or @sideeffect) in the documentation, which 
-# will result in a user-defined paragraph with heading "Side Effects:". 
-# You can put \n's in the value part of an alias to insert newlines.
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
+# Python sources only. Doxygen will then generate output that is more tailored
+# for that language. For instance, namespaces will be presented as packages,
+# qualified scopes will look different, etc.
+# The default value is: NO.
 
-ALIASES                = 
+OPTIMIZE_OUTPUT_JAVA   = NO
 
-# This tag can be used to specify a number of word-keyword mappings (TCL only). 
-# A mapping has the form "name=value". For example adding 
-# "class=itcl::class" will allow you to use the command class in the 
-# itcl::class meaning.
+# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
+# sources. Doxygen will then generate output that is tailored for Fortran.
+# The default value is: NO.
 
-TCL_SUBST              = 
+OPTIMIZE_FOR_FORTRAN   = NO
 
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C 
-# sources only. Doxygen will then generate output that is more tailored for C. 
-# For instance, some of the names that are used will be different. The list 
-# of all members will be omitted, etc.
+# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
+# sources. Doxygen will then generate output that is tailored for VHDL.
+# The default value is: NO.
 
-OPTIMIZE_OUTPUT_FOR_C  = NO
+OPTIMIZE_OUTPUT_VHDL   = NO
 
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java 
-# sources only. Doxygen will then generate output that is more tailored for 
-# Java. For instance, namespaces will be presented as packages, qualified 
-# scopes will look different, etc.
+# Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice
+# sources only. Doxygen will then generate output that is more tailored for that
+# language. For instance, namespaces will be presented as modules, types will be
+# separated into more groups, etc.
+# The default value is: NO.
+
+OPTIMIZE_OUTPUT_SLICE  = NO
+
+# Doxygen selects the parser to use depending on the extension of the files it
+# parses. With this tag you can assign which parser to use for a given
+# extension. Doxygen has a built-in mapping, but you can override or extend it
+# using this tag. The format is ext=language, where ext is a file extension, and
+# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
+# Csharp (C#), C, C++, D, PHP, md (Markdown), Objective-C, Python, Slice,
+# Fortran (fixed format Fortran: FortranFixed, free formatted Fortran:
+# FortranFree, unknown formatted Fortran: Fortran. In the latter case the parser
+# tries to guess whether the code is fixed or free formatted code, this is the
+# default for Fortran type files), VHDL, tcl. For instance to make doxygen treat
+# .inc files as Fortran files (default is PHP), and .f files as C (default is
+# Fortran), use: inc=Fortran f=C.
+#
+# Note: For files without extension you can use no_extension as a placeholder.
+#
+# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
+# the files are not read by doxygen.
 
-OPTIMIZE_OUTPUT_JAVA   = NO
+EXTENSION_MAPPING      =
 
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran 
-# sources only. Doxygen will then generate output that is more tailored for 
-# Fortran.
+# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
+# according to the Markdown format, which allows for more readable
+# documentation. See https://daringfireball.net/projects/markdown/ for details.
+# The output of markdown processing is further processed by doxygen, so you can
+# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
+# case of backward compatibilities issues.
+# The default value is: YES.
 
-OPTIMIZE_FOR_FORTRAN   = NO
+MARKDOWN_SUPPORT       = YES
 
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL 
-# sources. Doxygen will then generate output that is tailored for 
-# VHDL.
+# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
+# to that level are automatically included in the table of contents, even if
+# they do not have an id attribute.
+# Note: This feature currently applies only to Markdown headings.
+# Minimum value: 0, maximum value: 99, default value: 0.
+# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
 
-OPTIMIZE_OUTPUT_VHDL   = NO
+TOC_INCLUDE_HEADINGS   = 0
 
-# Doxygen selects the parser to use depending on the extension of the files it 
-# parses. With this tag you can assign which parser to use for a given extension. 
-# Doxygen has a built-in mapping, but you can override or extend it using this 
-# tag. The format is ext=language, where ext is a file extension, and language 
-# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C, 
-# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make 
-# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C 
-# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions 
-# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen.
-
-EXTENSION_MAPPING      = 
-
-# If MARKDOWN_SUPPORT is enabled (the default) then doxygen pre-processes all 
-# comments according to the Markdown format, which allows for more readable 
-# documentation. See http://daringfireball.net/projects/markdown/ for details. 
-# The output of markdown processing is further processed by doxygen, so you 
-# can mix doxygen, HTML, and XML commands with Markdown formatting. 
-# Disable only in case of backward compatibilities issues.
+# When enabled doxygen tries to link words that correspond to documented
+# classes, or namespaces to their corresponding documentation. Such a link can
+# be prevented in individual cases by putting a % sign in front of the word or
+# globally by setting AUTOLINK_SUPPORT to NO.
+# The default value is: YES.
 
-MARKDOWN_SUPPORT       = YES
+AUTOLINK_SUPPORT       = YES
 
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want 
-# to include (a tag file for) the STL sources as input, then you should 
-# set this tag to YES in order to let doxygen match functions declarations and 
-# definitions whose arguments contain STL classes (e.g. func(std::string); v.s. 
-# func(std::string) {}). This also makes the inheritance and collaboration 
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
+# to include (a tag file for) the STL sources as input, then you should set this
+# tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string);
+# versus func(std::string) {}). This also makes the inheritance and collaboration
 # diagrams that involve STL classes more complete and accurate.
+# The default value is: NO.
 
 BUILTIN_STL_SUPPORT    = NO
 
-# If you use Microsoft's C++/CLI language, you should set this option to YES to 
+# If you use Microsoft's C++/CLI language, you should set this option to YES to
 # enable parsing support.
+# The default value is: NO.
 
 CPP_CLI_SUPPORT        = NO
 
-# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
-# Doxygen will parse them like normal C++ but will assume all classes use public 
-# instead of private inheritance when no explicit protection keyword is present.
+# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
+# https://www.riverbankcomputing.com/software/sip/intro) sources only. Doxygen
+# will parse them like normal C++ but will assume all classes use public instead
+# of private inheritance when no explicit protection keyword is present.
+# The default value is: NO.
 
 SIP_SUPPORT            = NO
 
-# For Microsoft's IDL there are propget and propput attributes to indicate getter 
-# and setter methods for a property. Setting this option to YES (the default) 
-# will make doxygen replace the get and set methods by a property in the 
-# documentation. This will only work if the methods are indeed getting or 
-# setting a simple type. If this is not the case, or you want to show the 
-# methods anyway, you should set this option to NO.
+# For Microsoft's IDL there are propget and propput attributes to indicate
+# getter and setter methods for a property. Setting this option to YES will make
+# doxygen to replace the get and set methods by a property in the documentation.
+# This will only work if the methods are indeed getting or setting a simple
+# type. If this is not the case, or you want to show the methods anyway, you
+# should set this option to NO.
+# The default value is: YES.
 
 IDL_PROPERTY_SUPPORT   = YES
 
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC 
-# tag is set to YES, then doxygen will reuse the documentation of the first 
-# member in the group (if any) for the other members of the group. By default 
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
 # all members of a group must be documented explicitly.
+# The default value is: NO.
 
 DISTRIBUTE_GROUP_DOC   = NO
 
-# Set the SUBGROUPING tag to YES (the default) to allow class member groups of 
-# the same type (for instance a group of public functions) to be put as a 
-# subgroup of that type (e.g. under the Public Functions section). Set it to 
-# NO to prevent subgrouping. Alternatively, this can be done per class using 
-# the \nosubgrouping command.
+# If one adds a struct or class to a group and this option is enabled, then also
+# any nested class or struct is added to the same group. By default this option
+# is disabled and one has to add nested compounds explicitly via \ingroup.
+# The default value is: NO.
+
+GROUP_NESTED_COMPOUNDS = NO
+
+# Set the SUBGROUPING tag to YES to allow class member groups of the same type
+# (for instance a group of public functions) to be put as a subgroup of that
+# type (e.g. under the Public Functions section). Set it to NO to prevent
+# subgrouping. Alternatively, this can be done per class using the
+# \nosubgrouping command.
+# The default value is: YES.
 
 SUBGROUPING            = YES
 
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and 
-# unions are shown inside the group in which they are included (e.g. using 
-# @ingroup) instead of on a separate page (for HTML and Man pages) or 
-# section (for LaTeX and RTF).
+# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
+# are shown inside the group in which they are included (e.g. using \ingroup)
+# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
+# and RTF).
+#
+# Note that this feature does not work in combination with
+# SEPARATE_MEMBER_PAGES.
+# The default value is: NO.
 
 INLINE_GROUPED_CLASSES = NO
 
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and 
-# unions with only public data fields will be shown inline in the documentation 
-# of the scope in which they are defined (i.e. file, namespace, or group 
-# documentation), provided this scope is documented. If set to NO (the default), 
-# structs, classes, and unions are shown on a separate page (for HTML and Man 
-# pages) or section (for LaTeX and RTF).
+# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
+# with only public data fields or simple typedef fields will be shown inline in
+# the documentation of the scope in which they are defined (i.e. file,
+# namespace, or group documentation), provided this scope is documented. If set
+# to NO, structs, classes, and unions are shown on a separate page (for HTML and
+# Man pages) or section (for LaTeX and RTF).
+# The default value is: NO.
 
 INLINE_SIMPLE_STRUCTS  = NO
 
-# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum 
-# is documented as struct, union, or enum with the name of the typedef. So 
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct 
-# with name TypeT. When disabled the typedef will appear as a member of a file, 
-# namespace, or class. And the struct will be named TypeS. This can typically 
-# be useful for C code in case the coding convention dictates that all compound 
+# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
+# enum is documented as struct, union, or enum with the name of the typedef. So
+# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
+# with name TypeT. When disabled the typedef will appear as a member of a file,
+# namespace, or class. And the struct will be named TypeS. This can typically be
+# useful for C code in case the coding convention dictates that all compound
 # types are typedef'ed and only the typedef is referenced, never the tag name.
+# The default value is: NO.
 
 TYPEDEF_HIDES_STRUCT   = NO
 
-# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to 
-# determine which symbols to keep in memory and which to flush to disk. 
-# When the cache is full, less often used symbols will be written to disk. 
-# For small to medium size projects (<1000 input files) the default value is 
-# probably good enough. For larger projects a too small cache size can cause 
-# doxygen to be busy swapping symbols to and from disk most of the time 
-# causing a significant performance penalty. 
-# If the system has enough physical memory increasing the cache will improve the 
-# performance by keeping more symbols in memory. Note that the value works on 
-# a logarithmic scale so increasing the size by one will roughly double the 
-# memory usage. The cache size is given by this formula: 
-# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, 
-# corresponding to a cache size of 2^16 = 65536 symbols.
-
-SYMBOL_CACHE_SIZE      = 0
-
-# Similar to the SYMBOL_CACHE_SIZE the size of the symbol lookup cache can be 
-# set using LOOKUP_CACHE_SIZE. This cache is used to resolve symbols given 
-# their name and scope. Since this can be an expensive process and often the 
-# same symbol appear multiple times in the code, doxygen keeps a cache of 
-# pre-resolved symbols. If the cache is too small doxygen will become slower. 
-# If the cache is too large, memory is wasted. The cache size is given by this 
-# formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range is 0..9, the default is 0, 
-# corresponding to a cache size of 2^16 = 65536 symbols.
+# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
+# cache is used to resolve symbols given their name and scope. Since this can be
+# an expensive process and often the same symbol appears multiple times in the
+# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
+# doxygen will become slower. If the cache is too large, memory is wasted. The
+# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
+# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
+# symbols. At the end of a run doxygen will report the cache usage and suggest
+# the optimal cache size from a speed point of view.
+# Minimum value: 0, maximum value: 9, default value: 0.
 
 LOOKUP_CACHE_SIZE      = 0
 
@@ -360,351 +451,421 @@ LOOKUP_CACHE_SIZE      = 0
 # Build related configuration options
 #---------------------------------------------------------------------------
 
-# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in 
-# documentation are documented, even if no documentation was available. 
-# Private class members and static file members will be hidden unless 
-# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
+# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
+# documentation are documented, even if no documentation was available. Private
+# class members and static file members will be hidden unless the
+# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
+# Note: This will also disable the warnings about undocumented members that are
+# normally produced when WARNINGS is set to YES.
+# The default value is: NO.
 
 EXTRACT_ALL            = YES
 
-# If the EXTRACT_PRIVATE tag is set to YES all private members of a class 
-# will be included in the documentation.
+# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
+# be included in the documentation.
+# The default value is: NO.
 
 EXTRACT_PRIVATE        = NO
 
-# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal
+# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
 # scope will be included in the documentation.
+# The default value is: NO.
 
 EXTRACT_PACKAGE        = NO
 
-# If the EXTRACT_STATIC tag is set to YES all static members of a file 
-# will be included in the documentation.
+# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
+# included in the documentation.
+# The default value is: NO.
 
 EXTRACT_STATIC         = YES
 
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) 
-# defined locally in source files will be included in the documentation. 
-# If set to NO only classes defined in header files are included.
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
+# locally in source files will be included in the documentation. If set to NO,
+# only classes defined in header files are included. Does not have any effect
+# for Java sources.
+# The default value is: YES.
 
 EXTRACT_LOCAL_CLASSES  = YES
 
-# This flag is only useful for Objective-C code. When set to YES local 
-# methods, which are defined in the implementation section but not in 
-# the interface are included in the documentation. 
-# If set to NO (the default) only methods in the interface are included.
+# This flag is only useful for Objective-C code. If set to YES, local methods,
+# which are defined in the implementation section but not in the interface are
+# included in the documentation. If set to NO, only methods in the interface are
+# included.
+# The default value is: NO.
 
 EXTRACT_LOCAL_METHODS  = NO
 
-# If this flag is set to YES, the members of anonymous namespaces will be 
-# extracted and appear in the documentation as a namespace called 
-# 'anonymous_namespace{file}', where file will be replaced with the base 
-# name of the file that contains the anonymous namespace. By default 
-# anonymous namespaces are hidden.
+# If this flag is set to YES, the members of anonymous namespaces will be
+# extracted and appear in the documentation as a namespace called
+# 'anonymous_namespace{file}', where file will be replaced with the base name of
+# the file that contains the anonymous namespace. By default anonymous
+# namespaces are hidden.
+# The default value is: NO.
 
 EXTRACT_ANON_NSPACES   = NO
 
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all 
-# undocumented members of documented classes, files or namespaces. 
-# If set to NO (the default) these members will be included in the 
-# various overviews, but no documentation section is generated. 
-# This option has no effect if EXTRACT_ALL is enabled.
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
 
 HIDE_UNDOC_MEMBERS     = NO
 
-# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all 
-# undocumented classes that are normally visible in the class hierarchy. 
-# If set to NO (the default) these classes will be included in the various 
-# overviews. This option has no effect if EXTRACT_ALL is enabled.
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO, these classes will be included in the various overviews. This option
+# has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
 
 HIDE_UNDOC_CLASSES     = NO
 
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all 
-# friend (class|struct|union) declarations. 
-# If set to NO (the default) these declarations will be included in the 
-# documentation.
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO, these declarations will be
+# included in the documentation.
+# The default value is: NO.
 
 HIDE_FRIEND_COMPOUNDS  = NO
 
-# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any 
-# documentation blocks found inside the body of a function. 
-# If set to NO (the default) these blocks will be appended to the 
-# function's detailed documentation block.
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO, these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
 
 HIDE_IN_BODY_DOCS      = NO
 
-# The INTERNAL_DOCS tag determines if documentation 
-# that is typed after a \internal command is included. If the tag is set 
-# to NO (the default) then the documentation will be excluded. 
-# Set it to YES to include the internal documentation.
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
 
 INTERNAL_DOCS          = NO
 
-# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate 
-# file names in lower-case letters. If set to YES upper-case letters are also 
-# allowed. This is useful if you have classes or files whose names only differ 
-# in case and if your file system supports case sensitive file names. Windows 
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES, upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
 # and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
 
 CASE_SENSE_NAMES       = NO
 
-# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen 
-# will show members with their full class and namespace scopes in the 
-# documentation. If set to YES the scope will be hidden.
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES, the
+# scope will be hidden.
+# The default value is: NO.
 
 HIDE_SCOPE_NAMES       = NO
 
-# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen 
-# will put a list of the files that are included by a file in the documentation 
-# of that file.
+# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
+# append additional text to a page's title, such as Class Reference. If set to
+# YES the compound reference will be hidden.
+# The default value is: NO.
+
+HIDE_COMPOUND_REFERENCE= NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
 
 SHOW_INCLUDE_FILES     = YES
 
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen 
-# will list include files with double quotes in the documentation 
-# rather than with sharp brackets.
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC  = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
 
 FORCE_LOCAL_INCLUDES   = NO
 
-# If the INLINE_INFO tag is set to YES (the default) then a tag [inline] 
-# is inserted in the documentation for inline members.
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
 
 INLINE_INFO            = YES
 
-# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen 
-# will sort the (detailed) documentation of file and class members 
-# alphabetically by member name. If set to NO the members will appear in 
-# declaration order.
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order.
+# The default value is: YES.
 
 SORT_MEMBER_DOCS       = YES
 
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the 
-# brief documentation of file, namespace and class members alphabetically 
-# by member name. If set to NO (the default) the members will appear in 
-# declaration order.
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO, the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
 
 SORT_BRIEF_DOCS        = NO
 
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen 
-# will sort the (brief and detailed) documentation of class members so that 
-# constructors and destructors are listed first. If set to NO (the default) 
-# the constructors will appear in the respective orders defined by 
-# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. 
-# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO 
-# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO.
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
 
 SORT_MEMBERS_CTORS_1ST = NO
 
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the 
-# hierarchy of group names into alphabetical order. If set to NO (the default) 
-# the group names will appear in their defined order.
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
 
 SORT_GROUP_NAMES       = NO
 
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be 
-# sorted by fully-qualified names, including namespaces. If set to 
-# NO (the default), the class list will be sorted only by class name, 
-# not including the namespace part. 
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. 
-# Note: This option applies only to the class list, not to the 
-# alphabetical list.
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
 
 SORT_BY_SCOPE_NAME     = NO
 
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to 
-# do proper type resolution of all parameters of a function it will reject a 
-# match between the prototype and the implementation of a member function even 
-# if there is only one candidate or it is obvious which candidate to choose 
-# by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen 
-# will still accept a match between prototype and implementation in such cases.
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
 
 STRICT_PROTO_MATCHING  = NO
 
-# The GENERATE_TODOLIST tag can be used to enable (YES) or 
-# disable (NO) the todo list. This list is created by putting \todo 
-# commands in the documentation.
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
+# list. This list is created by putting \todo commands in the documentation.
+# The default value is: YES.
 
 GENERATE_TODOLIST      = YES
 
-# The GENERATE_TESTLIST tag can be used to enable (YES) or 
-# disable (NO) the test list. This list is created by putting \test 
-# commands in the documentation.
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
+# list. This list is created by putting \test commands in the documentation.
+# The default value is: YES.
 
 GENERATE_TESTLIST      = YES
 
-# The GENERATE_BUGLIST tag can be used to enable (YES) or 
-# disable (NO) the bug list. This list is created by putting \bug 
-# commands in the documentation.
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
 
 GENERATE_BUGLIST       = YES
 
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or 
-# disable (NO) the deprecated list. This list is created by putting 
-# \deprecated commands in the documentation.
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
 
 GENERATE_DEPRECATEDLIST= YES
 
-# The ENABLED_SECTIONS tag can be used to enable conditional 
-# documentation sections, marked by \if sectionname ... \endif.
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
 
-ENABLED_SECTIONS       = 
+ENABLED_SECTIONS       =
 
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines 
-# the initial value of a variable or macro consists of for it to appear in 
-# the documentation. If the initializer consists of more lines than specified 
-# here it will be hidden. Use a value of 0 to hide initializers completely. 
-# The appearance of the initializer of individual variables and macros in the 
-# documentation can be controlled using \showinitializer or \hideinitializer 
-# command in the documentation regardless of this setting.
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
 
 MAX_INITIALIZER_LINES  = 30
 
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated 
-# at the bottom of the documentation of classes and structs. If set to YES the 
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES, the
 # list will mention the files that were used to generate the documentation.
+# The default value is: YES.
 
 SHOW_USED_FILES        = YES
 
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. 
-# This will remove the Files entry from the Quick Index and from the 
-# Folder Tree View (if specified). The default is YES.
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
 
 SHOW_FILES             = YES
 
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the 
-# Namespaces page.  This will remove the Namespaces entry from the Quick Index 
-# and from the Folder Tree View (if specified). The default is YES.
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
 
 SHOW_NAMESPACES        = YES
 
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that 
-# doxygen should invoke to get the current version for each file (typically from 
-# the version control system). Doxygen will invoke the program by executing (via 
-# popen()) the command <command> <input-file>, where <command> is the value of 
-# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file 
-# provided by doxygen. Whatever the program writes to standard output 
-# is used as the file version. See the manual for examples.
-
-FILE_VERSION_FILTER    = 
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed 
-# by doxygen. The layout file controls the global structure of the generated 
-# output files in an output format independent way. To create the layout file 
-# that represents doxygen's defaults, run doxygen with the -l option. 
-# You can optionally specify a file name after the option, if omitted 
-# DoxygenLayout.xml will be used as the name of the layout file.
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command command input-file, where command is the value of the
+# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
+# by doxygen. Whatever the program writes to standard output is used as the file
+# version. For an example see the documentation.
+
+FILE_VERSION_FILTER    =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
 
-LAYOUT_FILE            = 
+LAYOUT_FILE            =
 
-# The CITE_BIB_FILES tag can be used to specify one or more bib files 
-# containing the references data. This must be a list of .bib files. The 
-# .bib extension is automatically appended if omitted. Using this command 
-# requires the bibtex tool to be installed. See also 
-# http://en.wikipedia.org/wiki/BibTeX for more info. For LaTeX the style 
-# of the bibliography can be controlled using LATEX_BIB_STYLE. To use this 
-# feature you need bibtex and perl available in the search path.
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. See also \cite for info how to create references.
 
-CITE_BIB_FILES         = 
+CITE_BIB_FILES         =
 
 #---------------------------------------------------------------------------
-# configuration options related to warning and progress messages
+# Configuration options related to warning and progress messages
 #---------------------------------------------------------------------------
 
-# The QUIET tag can be used to turn on/off the messages that are generated 
-# by doxygen. Possible values are YES and NO. If left blank NO is used.
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
 
 QUIET                  = NO
 
-# The WARNINGS tag can be used to turn on/off the warning messages that are 
-# generated by doxygen. Possible values are YES and NO. If left blank 
-# NO is used.
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
 
 WARNINGS               = YES
 
-# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings 
-# for undocumented members. If EXTRACT_ALL is set to YES then this flag will 
-# automatically be disabled.
+# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
 
 WARN_IF_UNDOCUMENTED   = YES
 
-# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for 
-# potential errors in the documentation, such as not documenting some 
-# parameters in a documented function, or documenting parameters that 
-# don't exist or using markup commands wrongly.
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
 
 WARN_IF_DOC_ERROR      = YES
 
-# The WARN_NO_PARAMDOC option can be enabled to get warnings for 
-# functions that are documented, but have no documentation for their parameters 
-# or return value. If set to NO (the default) doxygen will only warn about 
-# wrong or incomplete parameter documentation, but not about the absence of 
-# documentation.
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO, doxygen will only warn about wrong or incomplete
+# parameter documentation, but not about the absence of documentation. If
+# EXTRACT_ALL is set to YES then this flag will automatically be disabled.
+# The default value is: NO.
 
+# TODO: enable once parameter/return documentation is complete
 WARN_NO_PARAMDOC       = NO
 
-# The WARN_FORMAT tag determines the format of the warning messages that 
-# doxygen can produce. The string should contain the $file, $line, and $text 
-# tags, which will be replaced by the file and line number from which the 
-# warning originated and the warning text. Optionally the format may contain 
-# $version, which will be replaced by the version of the file (if it could 
-# be obtained via FILE_VERSION_FILTER)
+# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
+# a warning is encountered.
+# The default value is: NO.
+
+WARN_AS_ERROR          = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
 
 WARN_FORMAT            = "$file:$line: $text"
 
-# The WARN_LOGFILE tag can be used to specify a file to which warning 
-# and error messages should be written. If left blank the output is written 
-# to stderr.
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
 
-WARN_LOGFILE           = 
+WARN_LOGFILE           =
 
 #---------------------------------------------------------------------------
-# configuration options related to the input files
+# Configuration options related to the input files
 #---------------------------------------------------------------------------
 
-# The INPUT tag can be used to specify the files and/or directories that contain 
-# documented source files. You may enter file names like "myfile.cpp" or 
-# directories like "/usr/src/myproject". Separate the files or directories 
-# with spaces.
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
+# Note: If this tag is empty the current directory is searched.
 
 INPUT                  = "@CMAKE_DOXYGEN_INPUT_LIST@"
 
-# This tag can be used to specify the character encoding of the source files 
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is 
-# also the default input encoding. Doxygen uses libiconv (or the iconv built 
-# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for 
-# the list of possible encodings.
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: https://www.gnu.org/software/libiconv/) for the list of
+# possible encodings.
+# The default value is: UTF-8.
 
 INPUT_ENCODING         = UTF-8
 
-# If the value of the INPUT tag contains directories, you can use the 
-# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp 
-# and *.h) to filter out the source-files in the directories. If left 
-# blank the following patterns are tested: 
-# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh 
-# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py 
-# *.f90 *.f *.for *.vhd *.vhdl
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# read by doxygen.
+#
+# If left blank the following patterns are tested: *.c, *.cc, *.cxx, *.cpp,
+# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
+# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
+# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
+# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
 
 FILE_PATTERNS          = *.c \
                          *.cc \
                          *.cxx \
                          *.cpp \
                          *.c++ \
-                         *.d \
                          *.java \
                          *.ii \
                          *.ixx \
                          *.ipp \
                          *.i++ \
                          *.inl \
+                         *.idl \
+                         *.ddl \
+                         *.odl \
                          *.h \
                          *.hh \
                          *.hxx \
                          *.hpp \
                          *.h++ \
-                         *.idl \
-                         *.odl \
                          *.cs \
+                         *.d \
                          *.php \
-                         *.php3 \
+                         *.php4 \
+                         *.php5 \
+                         *.phtml \
                          *.inc \
                          *.m \
                          *.markdown \
@@ -712,892 +873,1349 @@ FILE_PATTERNS          = *.c \
                          *.mm \
                          *.dox \
                          *.py \
+                         *.pyw \
                          *.f90 \
+                         *.f95 \
+                         *.f03 \
+                         *.f08 \
                          *.f \
                          *.for \
+                         *.tcl \
                          *.vhd \
-                         *.vhdl
+                         *.vhdl \
+                         *.ucf \
+                         *.qsf \
+                         *.ice
 
-# The RECURSIVE tag can be used to turn specify whether or not subdirectories 
-# should be searched for input files as well. Possible values are YES and NO. 
-# If left blank NO is used.
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
 
 RECURSIVE              = YES
 
-# The EXCLUDE tag can be used to specify files and/or directories that should be 
-# excluded from the INPUT source files. This way you can easily exclude a 
-# subdirectory from a directory tree whose root is specified with the INPUT tag. 
-# Note that relative paths are relative to the directory from which doxygen is 
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
 # run.
 
-EXCLUDE                = 
+EXCLUDE                =
 
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or 
-# directories that are symbolic links (a Unix file system feature) are excluded 
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
 # from the input.
+# The default value is: NO.
 
 EXCLUDE_SYMLINKS       = NO
 
-# If the value of the INPUT tag contains directories, you can use the 
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude 
-# certain files from those directories. Note that the wildcards are matched 
-# against the file with absolute path, so to exclude all test directories 
-# for example use the pattern */test/*
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
 
-EXCLUDE_PATTERNS       = 
+EXCLUDE_PATTERNS       =
 
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names 
-# (namespaces, classes, functions, etc.) that should be excluded from the 
-# output. The symbol name can be a fully qualified name, a word, or if the 
-# wildcard * is used, a substring. Examples: ANamespace, AClass, 
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
 # AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
 
-EXCLUDE_SYMBOLS        = 
+EXCLUDE_SYMBOLS        =
 
-# The EXAMPLE_PATH tag can be used to specify one or more files or 
-# directories that contain example code fragments that are included (see 
-# the \include command).
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
 
-EXAMPLE_PATH           = 
+EXAMPLE_PATH           =
 
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the 
-# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp 
-# and *.h) to filter out the source-files in the directories. If left 
-# blank all files are included.
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
 
 EXAMPLE_PATTERNS       = *
 
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be 
-# searched for input files to be used with the \include or \dontinclude 
-# commands irrespective of the value of the RECURSIVE tag. 
-# Possible values are YES and NO. If left blank NO is used.
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
 
 EXAMPLE_RECURSIVE      = NO
 
-# The IMAGE_PATH tag can be used to specify one or more files or 
-# directories that contain image that are included in the documentation (see 
-# the \image command).
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
 
-IMAGE_PATH             = 
+IMAGE_PATH             =
 
-# The INPUT_FILTER tag can be used to specify a program that doxygen should 
-# invoke to filter for each input file. Doxygen will invoke the filter program 
-# by executing (via popen()) the command <filter> <input-file>, where <filter> 
-# is the value of the INPUT_FILTER tag, and <input-file> is the name of an 
-# input file. Doxygen will then use the output that the filter program writes 
-# to standard output.  If FILTER_PATTERNS is specified, this tag will be 
-# ignored.
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
+
+INPUT_FILTER           =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+#
+# Note that for custom extensions or not directly supported extensions you also
+# need to set EXTENSION_MAPPING for the extension otherwise the files are not
+# properly processed by doxygen.
 
-INPUT_FILTER           = 
+FILTER_PATTERNS        =
 
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern 
-# basis.  Doxygen will compare the file name with each pattern and apply the 
-# filter if there is a match.  The filters are a list of the form: 
-# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further 
-# info on how filters are used. If FILTER_PATTERNS is empty or if 
-# non of the patterns match the file name, INPUT_FILTER is applied.
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
 
-FILTER_PATTERNS        = 
+FILTER_SOURCE_FILES    = NO
 
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using 
-# INPUT_FILTER) will be used to filter the input files when producing source 
-# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
 
-FILTER_SOURCE_FILES    = NO
+FILTER_SOURCE_PATTERNS =
 
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file 
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) 
-# and it is also possible to disable source filtering for a specific pattern 
-# using *.ext= (so without naming a filter). This option only has effect when 
-# FILTER_SOURCE_FILES is enabled.
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
 
-FILTER_SOURCE_PATTERNS = 
+USE_MDFILE_AS_MAINPAGE =
 
 #---------------------------------------------------------------------------
-# configuration options related to source browsing
+# Configuration options related to source browsing
 #---------------------------------------------------------------------------
 
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will 
-# be generated. Documented entities will be cross-referenced with these sources. 
-# Note: To get rid of all source code in the generated output, make sure also 
-# VERBATIM_HEADERS is set to NO.
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
 
 SOURCE_BROWSER         = YES
 
-# Setting the INLINE_SOURCES tag to YES will include the body 
-# of functions and classes directly in the documentation.
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
 
 INLINE_SOURCES         = NO
 
-# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct 
-# doxygen to hide any special comment blocks from generated source code 
-# fragments. Normal C, C++ and Fortran comments will always remain visible.
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
 
 STRIP_CODE_COMMENTS    = YES
 
-# If the REFERENCED_BY_RELATION tag is set to YES 
-# then for each documented function all documented 
-# functions referencing it will be listed.
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# entity all documented functions referencing it will be listed.
+# The default value is: NO.
 
 REFERENCED_BY_RELATION = NO
 
-# If the REFERENCES_RELATION tag is set to YES 
-# then for each documented function all documented entities 
-# called/used by that function will be listed.
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
 
 REFERENCES_RELATION    = NO
 
-# If the REFERENCES_LINK_SOURCE tag is set to YES (the default) 
-# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from 
-# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will 
-# link to the source code.  Otherwise they will link to the documentation.
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
 
 REFERENCES_LINK_SOURCE = YES
 
-# If the USE_HTAGS tag is set to YES then the references to source code 
-# will point to the HTML generated by the htags(1) tool instead of doxygen 
-# built-in source browser. The htags tool is part of GNU's global source 
-# tagging system (see http://www.gnu.org/software/global/global.html). You 
-# will need version 4.8.6 or higher.
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS        = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see https://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
 
 USE_HTAGS              = NO
 
-# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen 
-# will generate a verbatim copy of the header file for each class for 
-# which an include is specified. Set to NO to disable this.
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
 
 VERBATIM_HEADERS       = YES
 
 #---------------------------------------------------------------------------
-# configuration options related to the alphabetical class index
+# Configuration options related to the alphabetical class index
 #---------------------------------------------------------------------------
 
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index 
-# of all compounds will be generated. Enable this if the project 
-# contains a lot of classes, structs, unions or interfaces.
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
 
 ALPHABETICAL_INDEX     = YES
 
-# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then 
-# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns 
-# in which this list will be split (can be a number in the range [1..20])
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
 
 COLS_IN_ALPHA_INDEX    = 5
 
-# In case all classes in a project start with a common prefix, all 
-# classes will be put under the same header in the alphabetical index. 
-# The IGNORE_PREFIX tag can be used to specify one or more prefixes that 
-# should be ignored while generating the index headers.
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
 
-IGNORE_PREFIX          = 
+IGNORE_PREFIX          =
 
 #---------------------------------------------------------------------------
-# configuration options related to the HTML output
+# Configuration options related to the HTML output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_HTML tag is set to YES (the default) Doxygen will 
-# generate HTML output.
+# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
+# The default value is: YES.
 
 GENERATE_HTML          = YES
 
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. 
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be 
-# put in front of it. If left blank `html' will be used as the default path.
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 HTML_OUTPUT            = html
 
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for 
-# each generated HTML page (for example: .htm,.php,.asp). If it is left blank 
-# doxygen will generate files with .html extension.
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 HTML_FILE_EXTENSION    = .html
 
-# The HTML_HEADER tag can be used to specify a personal HTML header for 
-# each generated HTML page. If it is left blank doxygen will generate a 
-# standard header. Note that when using a custom header you are responsible  
-# for the proper inclusion of any scripts and style sheets that doxygen 
-# needs, which is dependent on the configuration options used. 
-# It is advised to generate a default header using "doxygen -w html 
-# header.html footer.html stylesheet.css YourConfigFile" and then modify 
-# that header. Note that the header is subject to change so you typically 
-# have to redo this when upgrading to a newer version of doxygen or when 
-# changing the value of configuration settings such as GENERATE_TREEVIEW!
-
-HTML_HEADER            = 
-
-# The HTML_FOOTER tag can be used to specify a personal HTML footer for 
-# each generated HTML page. If it is left blank doxygen will generate a 
-# standard footer.
-
-HTML_FOOTER            = 
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading 
-# style sheet that is used by each HTML page. It can be used to 
-# fine-tune the look of the HTML output. If the tag is left blank doxygen 
-# will generate a default style sheet. Note that doxygen will try to copy 
-# the style sheet file to the HTML output directory, so don't put your own 
-# style sheet in the HTML output directory as well, or it will be erased!
-
-HTML_STYLESHEET        = 
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or 
-# other source files which should be copied to the HTML output directory. Note 
-# that these files will be copied to the base HTML output directory. Use the 
-# $relpath$ marker in the HTML_HEADER and/or HTML_FOOTER files to load these 
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that 
-# the files will be copied as-is; there are no commands or markers available.
-
-HTML_EXTRA_FILES       = 
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. 
-# Doxygen will adjust the colors in the style sheet and background images 
-# according to this color. Hue is specified as an angle on a colorwheel, 
-# see http://en.wikipedia.org/wiki/Hue for more information. 
-# For instance the value 0 represents red, 60 is yellow, 120 is green, 
-# 180 is cyan, 240 is blue, 300 purple, and 360 is red again. 
-# The allowed range is 0 to 359.
-
-HTML_COLORSTYLE_HUE    = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of 
-# the colors in the HTML output. For a value of 0 the output will use 
-# grayscales only. A value of 255 will produce the most vivid colors.
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML the header file that includes any scripts and style sheets
+# that doxygen needs, which is dependent on the configuration options used (e.g.
+# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
+# default header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER            =
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER            =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET        =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# cascading style sheets that are included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list). For an example see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET  =
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES       =
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the style sheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# https://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE    = 210
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 HTML_COLORSTYLE_SAT    = 100
 
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to 
-# the luminance component of the colors in the HTML output. Values below 
-# 100 gradually make the output lighter, whereas values above 100 make 
-# the output darker. The value divided by 100 is the actual gamma applied, 
-# so 80 represents a gamma of 0.8, The value 220 represents a gamma of 2.2, 
-# and 100 does not change the gamma.
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
-HTML_COLORSTYLE_GAMMA  = 80
+HTML_COLORSTYLE_GAMMA  = 100
 
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML 
-# page will contain the date and time when the page was generated. Setting 
-# this to NO can help when comparing the output of multiple runs.
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to YES can help to show when doxygen was last run and thus if the
+# documentation is up to date.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 HTML_TIMESTAMP         = YES
 
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML 
-# documentation will contain sections that can be hidden and shown after the 
+# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
+# documentation will contain a main index with vertical navigation menus that
+# are dynamically created via Javascript. If disabled, the navigation index will
+# consists of multiple levels of tabs that are statically embedded in every HTML
+# page. Disable this option to support browsers that do not have Javascript,
+# like the Qt help browser.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_MENUS     = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
 # page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 HTML_DYNAMIC_SECTIONS  = NO
 
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of 
-# entries shown in the various tree structured indices initially; the user 
-# can expand and collapse entries dynamically later on. Doxygen will expand 
-# the tree to such a level that at most the specified number of entries are 
-# visible (unless a fully collapsed tree already exceeds this amount). 
-# So setting the number of entries 1 will produce a full collapsed tree by 
-# default. 0 is a special value representing an infinite number of entries 
-# and will result in a full expanded tree by default.
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries 1 will produce a full collapsed tree by default. 0 is a special value
+# representing an infinite number of entries and will result in a full expanded
+# tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 HTML_INDEX_NUM_ENTRIES = 100
 
-# If the GENERATE_DOCSET tag is set to YES, additional index files 
-# will be generated that can be used as input for Apple's Xcode 3 
-# integrated development environment, introduced with OSX 10.5 (Leopard). 
-# To create a documentation set, doxygen will generate a Makefile in the 
-# HTML output directory. Running make will produce the docset in that 
-# directory and running "make install" will install the docset in 
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find 
-# it at startup. 
-# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html 
-# for more information.
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: https://developer.apple.com/xcode/), introduced with OSX
+# 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy
+# genXcode/_index.html for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 GENERATE_DOCSET        = NO
 
-# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the 
-# feed. A documentation feed provides an umbrella under which multiple 
-# documentation sets from a single provider (such as a company or product suite) 
-# can be grouped.
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
 
 DOCSET_FEEDNAME        = "Doxygen generated docs"
 
-# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that 
-# should uniquely identify the documentation set bundle. This should be a 
-# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen 
-# will append .docset to the name.
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
 
 DOCSET_BUNDLE_ID       = org.doxygen.Project
 
-# When GENERATE_PUBLISHER_ID tag specifies a string that should uniquely identify 
-# the documentation publisher. This should be a reverse domain-name style 
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
 # string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
 
 DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
 
-# The GENERATE_PUBLISHER_NAME tag identifies the documentation publisher.
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
 
 DOCSET_PUBLISHER_NAME  = Publisher
 
-# If the GENERATE_HTMLHELP tag is set to YES, additional index files 
-# will be generated that can be used as input for tools like the 
-# Microsoft HTML help workshop to generate a compiled HTML help file (.chm) 
-# of the generated HTML documentation.
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: https://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 GENERATE_HTMLHELP      = NO
 
-# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can 
-# be used to specify the file name of the resulting .chm file. You 
-# can add a path in front of the file if the result should not be 
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
 # written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
 
-CHM_FILE               = 
+CHM_FILE               =
 
-# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can 
-# be used to specify the location (absolute path including file name) of 
-# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run 
-# the HTML help compiler on the generated index.hhp.
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty,
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
 
-HHC_LOCATION           = 
+HHC_LOCATION           =
 
-# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag 
-# controls if a separate .chi index file is generated (YES) or that 
-# it should be included in the master .chm file (NO).
+# The GENERATE_CHI flag controls if a separate .chi index file is generated
+# (YES) or that it should be included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
 
 GENERATE_CHI           = NO
 
-# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING 
-# is used to encode HtmlHelp index (hhk), content (hhc) and project file 
-# content.
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
 
-CHM_INDEX_ENCODING     = 
+CHM_INDEX_ENCODING     =
 
-# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag 
-# controls whether a binary table of contents is generated (YES) or a 
-# normal table of contents (NO) in the .chm file.
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
 
 BINARY_TOC             = NO
 
-# The TOC_EXPAND flag can be set to YES to add extra items for group members 
-# to the contents of the HTML help documentation and to the tree view.
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
 
 TOC_EXPAND             = NO
 
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and 
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated 
-# that can be used as input for Qt's qhelpgenerator to generate a 
-# Qt Compressed Help (.qch) of the generated HTML documentation.
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 GENERATE_QHP           = NO
 
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can 
-# be used to specify the file name of the resulting .qch file. 
-# The path specified is relative to the HTML output folder.
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
-QCH_FILE               = 
+QCH_FILE               =
 
-# The QHP_NAMESPACE tag specifies the namespace to use when generating 
-# Qt Help Project output. For more information please see 
-# http://doc.trolltech.com/qthelpproject.html#namespace
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
 QHP_NAMESPACE          = org.doxygen.Project
 
-# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating 
-# Qt Help Project output. For more information please see 
-# http://doc.trolltech.com/qthelpproject.html#virtual-folders
+# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
+# Help Project output. For more information please see Qt Help Project / Virtual
+# Folders (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
 QHP_VIRTUAL_FOLDER     = doc
 
-# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to 
-# add. For more information please see 
-# http://doc.trolltech.com/qthelpproject.html#custom-filters
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
-QHP_CUST_FILTER_NAME   = 
+QHP_CUST_FILTER_NAME   =
 
-# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the 
-# custom filter to add. For more information please see 
-# <a href="http://doc.trolltech.com/qthelpproject.html#custom-filters"> 
-# Qt Help Project / Custom Filters</a>.
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
-QHP_CUST_FILTER_ATTRS  = 
+QHP_CUST_FILTER_ATTRS  =
 
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this 
-# project's 
-# filter section matches. 
-# <a href="http://doc.trolltech.com/qthelpproject.html#filter-attributes"> 
-# Qt Help Project / Filter Attributes</a>.
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
-QHP_SECT_FILTER_ATTRS  = 
+QHP_SECT_FILTER_ATTRS  =
 
-# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can 
-# be used to specify the location of Qt's qhelpgenerator. 
-# If non-empty doxygen will try to run qhelpgenerator on the generated 
-# .qhp file.
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
 
-QHG_LOCATION           = 
+QHG_LOCATION           =
 
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files  
-# will be generated, which together with the HTML files, form an Eclipse help 
-# plugin. To install this plugin and make it available under the help contents 
-# menu in Eclipse, the contents of the directory containing the HTML and XML 
-# files needs to be copied into the plugins directory of eclipse. The name of 
-# the directory within the plugins directory should be the same as 
-# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before 
-# the help appears.
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated, which together with the HTML files form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files needs
+# to be copied into the plugins directory of eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 GENERATE_ECLIPSEHELP   = NO
 
-# A unique identifier for the eclipse help plugin. When installing the plugin 
-# the directory name containing the HTML and XML files should also have 
-# this name.
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
 
 ECLIPSE_DOC_ID         = org.doxygen.Project
 
-# The DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) 
-# at top of each HTML page. The value NO (the default) enables the index and 
-# the value YES disables it. Since the tabs have the same information as the 
-# navigation tree you can set this option to NO if you already set 
-# GENERATE_TREEVIEW to YES.
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 DISABLE_INDEX          = NO
 
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index 
-# structure should be generated to display hierarchical information. 
-# If the tag value is set to YES, a side panel will be generated 
-# containing a tree-like index structure (just like the one that 
-# is generated for HTML Help). For this to work a browser that supports 
-# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). 
-# Windows users are probably better off using the HTML help feature. 
-# Since the tree basically has the same information as the tab index you 
-# could consider to set DISABLE_INDEX to NO when enabling this option.
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 GENERATE_TREEVIEW      = YES
 
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values 
-# (range [0,1..20]) that doxygen will group on one line in the generated HTML 
-# documentation. Note that a value of 0 will completely suppress the enum 
-# values from appearing in the overview section.
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 ENUM_VALUES_PER_LINE   = 4
 
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be 
-# used to set the initial width (in pixels) of the frame in which the tree 
-# is shown.
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 TREEVIEW_WIDTH         = 250
 
-# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open 
-# links to external symbols imported via tag files in a separate window.
+# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 EXT_LINKS_IN_WINDOW    = NO
 
-# Use this tag to change the font size of Latex formulas included 
-# as images in the HTML documentation. The default is 10. Note that 
-# when you change the font size after a successful doxygen run you need 
-# to manually remove any form_*.png images from the HTML output directory 
-# to force them to be regenerated.
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 FORMULA_FONTSIZE       = 10
 
-# Use the FORMULA_TRANPARENT tag to determine whether or not the images 
-# generated for formulas are transparent PNGs. Transparent PNGs are 
-# not supported properly for IE 6.0, but are supported on all modern browsers. 
-# Note that when changing this option you need to delete any form_*.png files 
-# in the HTML output before the changes have effect.
+# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
+# generated for formulas are transparent PNGs. Transparent PNGs are not
+# supported properly for IE 6.0, but are supported on all modern browsers.
+#
+# Note that when changing this option you need to delete any form_*.png files in
+# the HTML output directory before the changes have effect.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 FORMULA_TRANSPARENT    = YES
 
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax 
-# (see http://www.mathjax.org) which uses client side Javascript for the 
-# rendering instead of using prerendered bitmaps. Use this if you do not 
-# have LaTeX installed or if you want to formulas look prettier in the HTML 
-# output. When enabled you may also need to install MathJax separately and 
-# configure the path to it using the MATHJAX_RELPATH option.
+# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
+# https://www.mathjax.org) which uses client side Javascript for the rendering
+# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
+# installed or if you want the formulas to look prettier in the HTML output. When
+# enabled you may also need to install MathJax separately and configure the path
+# to it using the MATHJAX_RELPATH option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
 
 USE_MATHJAX            = NO
 
-# When MathJax is enabled you need to specify the location relative to the 
-# HTML output directory using the MATHJAX_RELPATH option. The destination 
-# directory should contain the MathJax.js script. For instance, if the mathjax 
-# directory is located at the same level as the HTML output directory, then 
-# MATHJAX_RELPATH should be ../mathjax. The default value points to 
-# the MathJax Content Delivery Network so you can quickly see the result without 
-# installing MathJax.  However, it is strongly recommended to install a local 
-# copy of MathJax from http://www.mathjax.org before deployment.
+# When MathJax is enabled you can set the default output format to be used for
+# the MathJax output. See the MathJax site (see:
+# http://docs.mathjax.org/en/latest/output.html) for more details.
+# Possible values are: HTML-CSS (which is slower, but has the best
+# compatibility), NativeMML (i.e. MathML) and SVG.
+# The default value is: HTML-CSS.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_FORMAT         = HTML-CSS
+
+# When MathJax is enabled you need to specify the location relative to the HTML
+# output directory using the MATHJAX_RELPATH option. The destination directory
+# should contain the MathJax.js script. For instance, if the mathjax directory
+# is located at the same level as the HTML output directory, then
+# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
+# Content Delivery Network so you can quickly see the result without installing
+# MathJax. However, it is strongly recommended to install a local copy of
+# MathJax from https://www.mathjax.org before deployment.
+# The default value is: https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_RELPATH        = https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/
+
+# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
+# extension names that should be enabled during MathJax rendering. For example
+# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_EXTENSIONS     =
+
+# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
+# of code that will be used on startup of the MathJax code. See the MathJax site
+# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
+# example see the documentation.
+# This tag requires that the tag USE_MATHJAX is set to YES.
+
+MATHJAX_CODEFILE       =
+
+# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
+# the HTML output. The underlying search engine uses javascript and DHTML and
+# should work on any modern browser. Note that when using HTML help
+# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
+# there is already a search function so this one should typically be disabled.
+# For large projects the javascript based search engine can be slow, then
+# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
+# search using the keyboard; to jump to the search box use <access key> + S
+# (what the <access key> is depends on the OS and browser, but it is typically
+# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
+# key> to jump into the search results window, the results can be navigated
+# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
+# the search. The filter options can be selected when the cursor is inside the
+# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
+# to select a filter and <Enter> or <escape> to activate or cancel the filter
+# option.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+SEARCHENGINE           = YES
+
+# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
+# implemented using a web server instead of a web client using Javascript. There
+# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
+# setting. When disabled, doxygen will generate a PHP script for searching and
+# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
+# and searching needs to be provided by external tools. See the section
+# "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
 
-MATHJAX_RELPATH        = http://cdn.mathjax.org/mathjax/latest
+SERVER_BASED_SEARCH    = NO
 
-# The MATHJAX_EXTENSIONS tag can be used to specify one or MathJax extension 
-# names that should be enabled during MathJax rendering.
+# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
+# script for searching. Instead the search results are written to an XML file
+# which needs to be processed by an external indexer. Doxygen will invoke an
+# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
+# search results.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: https://xapian.org/).
+#
+# See the section "External Indexing and Searching" for details.
+# The default value is: NO.
+# This tag requires that the tag SEARCHENGINE is set to YES.
 
-MATHJAX_EXTENSIONS     = 
+EXTERNAL_SEARCH        = NO
 
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box 
-# for the HTML output. The underlying search engine uses javascript 
-# and DHTML and should work on any modern browser. Note that when using 
-# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets 
-# (GENERATE_DOCSET) there is already a search function so this one should 
-# typically be disabled. For large projects the javascript based search engine 
-# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution.
+# The SEARCHENGINE_URL should point to a search engine hosted by a web server
+# which will return the search results when EXTERNAL_SEARCH is enabled.
+#
+# Doxygen ships with an example indexer (doxyindexer) and search engine
+# (doxysearch.cgi) which are based on the open source search engine library
+# Xapian (see: https://xapian.org/). See the section "External Indexing and
+# Searching" for details.
+# This tag requires that the tag SEARCHENGINE is set to YES.
 
-SEARCHENGINE           = YES
+SEARCHENGINE_URL       =
 
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be 
-# implemented using a PHP enabled web server instead of at the web client 
-# using Javascript. Doxygen will generate the search PHP script and index 
-# file to put on the web server. The advantage of the server 
-# based approach is that it scales better to large projects and allows 
-# full text search. The disadvantages are that it is more difficult to setup 
-# and does not have live searching capabilities.
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
+# search data is written to a file for indexing by an external tool. With the
+# SEARCHDATA_FILE tag the name of this file can be specified.
+# The default file is: searchdata.xml.
+# This tag requires that the tag SEARCHENGINE is set to YES.
 
-SERVER_BASED_SEARCH    = NO
+SEARCHDATA_FILE        = searchdata.xml
+
+# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
+# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
+# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
+# projects and redirect the results back to the right project.
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTERNAL_SEARCH_ID     =
+
+# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
+# projects other than the one defined by this configuration file, but that are
+# all added to the same external search index. Each project needs to have a
+# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
+# to a relative location where the documentation can be found. The format is:
+# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
+# This tag requires that the tag SEARCHENGINE is set to YES.
+
+EXTRA_SEARCH_MAPPINGS  =
 
 #---------------------------------------------------------------------------
-# configuration options related to the LaTeX output
+# Configuration options related to the LaTeX output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will 
-# generate Latex output.
+# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
+# The default value is: YES.
 
-GENERATE_LATEX         = YES
+GENERATE_LATEX         = NO
 
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. 
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be 
-# put in front of it. If left blank `latex' will be used as the default path.
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: latex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 LATEX_OUTPUT           = latex
 
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be 
-# invoked. If left blank `latex' will be used as the default command name. 
-# Note that when enabling USE_PDFLATEX this option is only used for 
-# generating bitmaps for formulas in the HTML output, but not in the 
-# Makefile that is written to the output directory.
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked.
+#
+# Note that when not enabling USE_PDFLATEX the default is latex when enabling
+# USE_PDFLATEX the default is pdflatex and when in the later case latex is
+# chosen this is overwritten by pdflatex. For specific output languages the
+# default can have been set differently, this depends on the implementation of
+# the output language.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_CMD_NAME         =
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
+# index for LaTeX.
+# Note: This tag is used in the Makefile / make.bat.
+# See also: LATEX_MAKEINDEX_CMD for the part in the generated output file
+# (.tex).
+# The default file is: makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
-LATEX_CMD_NAME         = latex
+MAKEINDEX_CMD_NAME     = makeindex
 
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to 
-# generate index for LaTeX. If left blank `makeindex' will be used as the 
-# default command name.
+# The LATEX_MAKEINDEX_CMD tag can be used to specify the command name to
+# generate index for LaTeX.
+# Note: This tag is used in the generated output file (.tex).
+# See also: MAKEINDEX_CMD_NAME for the part in the Makefile / make.bat.
+# The default value is: \makeindex.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
-MAKEINDEX_CMD_NAME     = makeindex
+LATEX_MAKEINDEX_CMD    = \makeindex
 
-# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact 
-# LaTeX documents. This may be useful for small projects and may help to 
-# save some trees in general.
+# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 COMPACT_LATEX          = NO
 
-# The PAPER_TYPE tag can be used to set the paper type that is used 
-# by the printer. Possible values are: a4, letter, legal and 
-# executive. If left blank a4wide will be used.
+# The PAPER_TYPE tag can be used to set the paper type that is used by the
+# printer.
+# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
+# 14 inches) and executive (7.25 x 10.5 inches).
+# The default value is: a4.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 PAPER_TYPE             = a4
 
-# The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX 
-# packages that should be included in the LaTeX output.
+# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
+# that should be included in the LaTeX output. The package can be specified just
+# by its name or with the correct syntax as to be used with the LaTeX
+# \usepackage command. To get the times font for instance you can specify :
+# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
+# To use the option intlimits with the amsmath package you can specify:
+# EXTRA_PACKAGES=[intlimits]{amsmath}
+# If left blank no extra packages will be included.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+EXTRA_PACKAGES         =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
+# generated LaTeX document. The header should contain everything until the first
+# chapter. If it is left blank doxygen will generate a standard header. See
+# section "Doxygen usage" for information on how to let doxygen write the
+# default header to a separate file.
+#
+# Note: Only use a user-defined header if you know what you are doing! The
+# following commands have a special meaning inside the header: $title,
+# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
+# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
+# string, for the replacement values of the other commands the user is referred
+# to HTML_HEADER.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_HEADER           =
+
+# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
+# generated LaTeX document. The footer should contain everything after the last
+# chapter. If it is left blank doxygen will generate a standard footer. See
+# LATEX_HEADER for more information on how to generate a default footer and what
+# special commands can be used inside the footer.
+#
+# Note: Only use a user-defined footer if you know what you are doing!
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
-EXTRA_PACKAGES         = 
+LATEX_FOOTER           =
 
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for 
-# the generated latex document. The header should contain everything until 
-# the first chapter. If it is left blank doxygen will generate a 
-# standard header. Notice: only use this tag if you know what you are doing!
+# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
+# LaTeX style sheets that are included after the standard style sheets created
+# by doxygen. Using this option one can overrule certain style aspects. Doxygen
+# will copy the style sheet files to the output directory.
+# Note: The order of the extra style sheet files is of importance (e.g. the last
+# style sheet in the list overrules the setting of the previous ones in the
+# list).
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
-LATEX_HEADER           = 
+LATEX_EXTRA_STYLESHEET =
 
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for 
-# the generated latex document. The footer should contain everything after 
-# the last chapter. If it is left blank doxygen will generate a 
-# standard footer. Notice: only use this tag if you know what you are doing!
+# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the LATEX_OUTPUT output
+# directory. Note that the files will be copied as-is; there are no commands or
+# markers available.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
-LATEX_FOOTER           = 
+LATEX_EXTRA_FILES      =
 
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated 
-# is prepared for conversion to pdf (using ps2pdf). The pdf file will 
-# contain links (just like the HTML output) instead of page references 
-# This makes the output suitable for online browsing using a pdf viewer.
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
+# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
+# contain links (just like the HTML output) instead of page references. This
+# makes the output suitable for online browsing using a PDF viewer.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 PDF_HYPERLINKS         = YES
 
-# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of 
-# plain latex in the generated Makefile. Set this option to YES to get a 
+# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
+# the PDF file directly from the LaTeX files. Set this option to YES, to get a
 # higher quality PDF documentation.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 USE_PDFLATEX           = YES
 
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. 
-# command to the generated LaTeX files. This will instruct LaTeX to keep 
-# running if errors occur, instead of asking the user for help. 
-# This option is also used when generating formulas in HTML.
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
+# command to the generated LaTeX files. This will instruct LaTeX to keep running
+# if errors occur, instead of asking the user for help. This option is also used
+# when generating formulas in HTML.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 LATEX_BATCHMODE        = NO
 
-# If LATEX_HIDE_INDICES is set to YES then doxygen will not 
-# include the index chapters (such as File Index, Compound Index, etc.) 
-# in the output.
+# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
+# index chapters (such as File Index, Compound Index, etc.) in the output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 LATEX_HIDE_INDICES     = NO
 
-# If LATEX_SOURCE_CODE is set to YES then doxygen will include 
-# source code with syntax highlighting in the LaTeX output. 
-# Note that which sources are shown also depends on other settings 
-# such as SOURCE_BROWSER.
+# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
+# code with syntax highlighting in the LaTeX output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 LATEX_SOURCE_CODE      = NO
 
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the 
-# bibliography, e.g. plainnat, or ieeetr. The default style is "plain". See 
-# http://en.wikipedia.org/wiki/BibTeX for more info.
+# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
+# bibliography, e.g. plainnat, or ieeetr. See
+# https://en.wikipedia.org/wiki/BibTeX and \cite for more info.
+# The default value is: plain.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
 
 LATEX_BIB_STYLE        = plain
 
+# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_TIMESTAMP        = NO
+
+# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute)
+# path from which the emoji images will be read. If a relative path is entered,
+# it will be relative to the LATEX_OUTPUT directory. If left blank the
+# LATEX_OUTPUT directory will be used.
+# This tag requires that the tag GENERATE_LATEX is set to YES.
+
+LATEX_EMOJI_DIRECTORY  =
+
 #---------------------------------------------------------------------------
-# configuration options related to the RTF output
+# Configuration options related to the RTF output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output 
-# The RTF output is optimized for Word 97 and may not look very pretty with 
-# other RTF readers or editors.
+# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
+# RTF output is optimized for Word 97 and may not look too pretty with other RTF
+# readers/editors.
+# The default value is: NO.
 
 GENERATE_RTF           = NO
 
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. 
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be 
-# put in front of it. If left blank `rtf' will be used as the default path.
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: rtf.
+# This tag requires that the tag GENERATE_RTF is set to YES.
 
 RTF_OUTPUT             = rtf
 
-# If the COMPACT_RTF tag is set to YES Doxygen generates more compact 
-# RTF documents. This may be useful for small projects and may help to 
-# save some trees in general.
+# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
+# documents. This may be useful for small projects and may help to save some
+# trees in general.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
 
 COMPACT_RTF            = NO
 
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated 
-# will contain hyperlink fields. The RTF file will 
-# contain links (just like the HTML output) instead of page references. 
-# This makes the output suitable for online browsing using WORD or other 
-# programs which support those fields. 
-# Note: wordpad (write) and others do not support links.
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
+# contain hyperlink fields. The RTF file will contain links (just like the HTML
+# output) instead of page references. This makes the output suitable for online
+# browsing using Word or some other Word compatible readers that support those
+# fields.
+#
+# Note: WordPad (write) and others do not support links.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
 
 RTF_HYPERLINKS         = NO
 
-# Load style sheet definitions from file. Syntax is similar to doxygen's 
-# config file, i.e. a series of assignments. You only have to provide 
+# Load stylesheet definitions from file. Syntax is similar to doxygen's
+# configuration file, i.e. a series of assignments. You only have to provide
 # replacements, missing definitions are set to their default value.
+#
+# See also section "Doxygen usage" for information on how to generate the
+# default style sheet that doxygen normally uses.
+# This tag requires that the tag GENERATE_RTF is set to YES.
+
+RTF_STYLESHEET_FILE    =
 
-RTF_STYLESHEET_FILE    = 
+# Set optional variables used in the generation of an RTF document. Syntax is
+# similar to doxygen's configuration file. A template extensions file can be
+# generated using doxygen -e rtf extensionFile.
+# This tag requires that the tag GENERATE_RTF is set to YES.
 
-# Set optional variables used in the generation of an rtf document. 
-# Syntax is similar to doxygen's config file.
+RTF_EXTENSIONS_FILE    =
+
+# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
+# with syntax highlighting in the RTF output.
+#
+# Note that which sources are shown also depends on other settings such as
+# SOURCE_BROWSER.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_RTF is set to YES.
 
-RTF_EXTENSIONS_FILE    = 
+RTF_SOURCE_CODE        = NO
 
 #---------------------------------------------------------------------------
-# configuration options related to the man page output
+# Configuration options related to the man page output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_MAN tag is set to YES (the default) Doxygen will 
-# generate man pages
+# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
+# classes and files.
+# The default value is: NO.
 
 GENERATE_MAN           = NO
 
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. 
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be 
-# put in front of it. If left blank `man' will be used as the default path.
+# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it. A directory man3 will be created inside the directory specified by
+# MAN_OUTPUT.
+# The default directory is: man.
+# This tag requires that the tag GENERATE_MAN is set to YES.
 
 MAN_OUTPUT             = man
 
-# The MAN_EXTENSION tag determines the extension that is added to 
-# the generated man pages (default is the subroutine's section .3)
+# The MAN_EXTENSION tag determines the extension that is added to the generated
+# man pages. In case the manual section does not start with a number, the number
+# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
+# optional.
+# The default value is: .3.
+# This tag requires that the tag GENERATE_MAN is set to YES.
 
 MAN_EXTENSION          = .3
 
-# If the MAN_LINKS tag is set to YES and Doxygen generates man output, 
-# then it will generate one additional man file for each entity 
-# documented in the real man page(s). These additional files 
-# only source the real man page, but without them the man command 
-# would be unable to find the correct page. The default is NO.
+# The MAN_SUBDIR tag determines the name of the directory created within
+# MAN_OUTPUT in which the man pages are placed. If defaults to man followed by
+# MAN_EXTENSION with the initial . removed.
+# This tag requires that the tag GENERATE_MAN is set to YES.
+
+MAN_SUBDIR             =
+
+# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
+# will generate one additional man file for each entity documented in the real
+# man page(s). These additional files only source the real man page, but without
+# them the man command would be unable to find the correct page.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_MAN is set to YES.
 
 MAN_LINKS              = NO
 
 #---------------------------------------------------------------------------
-# configuration options related to the XML output
+# Configuration options related to the XML output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_XML tag is set to YES Doxygen will 
-# generate an XML file that captures the structure of 
-# the code including all documentation.
+# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
+# captures the structure of the code including all documentation.
+# The default value is: NO.
 
 GENERATE_XML           = NO
 
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. 
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be 
-# put in front of it. If left blank `xml' will be used as the default path.
+# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: xml.
+# This tag requires that the tag GENERATE_XML is set to YES.
 
 XML_OUTPUT             = xml
 
-# The XML_SCHEMA tag can be used to specify an XML schema, 
-# which can be used by a validating XML parser to check the 
-# syntax of the XML files.
+# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
+# listings (including syntax highlighting and cross-referencing information) to
+# the XML output. Note that enabling this will significantly increase the size
+# of the XML output.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_XML is set to YES.
+
+XML_PROGRAMLISTING     = YES
 
-XML_SCHEMA             = 
+# If the XML_NS_MEMB_FILE_SCOPE tag is set to YES, doxygen will include
+# namespace members in file scope as well, matching the HTML output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_XML is set to YES.
 
-# The XML_DTD tag can be used to specify an XML DTD, 
-# which can be used by a validating XML parser to check the 
-# syntax of the XML files.
+XML_NS_MEMB_FILE_SCOPE = NO
 
-XML_DTD                = 
+#---------------------------------------------------------------------------
+# Configuration options related to the DOCBOOK output
+#---------------------------------------------------------------------------
 
-# If the XML_PROGRAMLISTING tag is set to YES Doxygen will 
-# dump the program listings (including syntax highlighting 
-# and cross-referencing information) to the XML output. Note that 
-# enabling this will significantly increase the size of the XML output.
+# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
+# that can be used to generate PDF.
+# The default value is: NO.
 
-XML_PROGRAMLISTING     = YES
+GENERATE_DOCBOOK       = NO
+
+# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
+# front of it.
+# The default directory is: docbook.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_OUTPUT         = docbook
+
+# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
+# program listings (including syntax highlighting and cross-referencing
+# information) to the DOCBOOK output. Note that enabling this will significantly
+# increase the size of the DOCBOOK output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
+
+DOCBOOK_PROGRAMLISTING = NO
 
 #---------------------------------------------------------------------------
-# configuration options for the AutoGen Definitions output
+# Configuration options for the AutoGen Definitions output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will 
-# generate an AutoGen Definitions (see autogen.sf.net) file 
-# that captures the structure of the code including all 
-# documentation. Note that this feature is still experimental 
-# and incomplete at the moment.
+# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
+# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures
+# the structure of the code including all documentation. Note that this feature
+# is still experimental and incomplete at the moment.
+# The default value is: NO.
 
 GENERATE_AUTOGEN_DEF   = NO
 
 #---------------------------------------------------------------------------
-# configuration options related to the Perl module output
+# Configuration options related to the Perl module output
 #---------------------------------------------------------------------------
 
-# If the GENERATE_PERLMOD tag is set to YES Doxygen will 
-# generate a Perl module file that captures the structure of 
-# the code including all documentation. Note that this 
-# feature is still experimental and incomplete at the 
-# moment.
+# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
+# file that captures the structure of the code including all documentation.
+#
+# Note that this feature is still experimental and incomplete at the moment.
+# The default value is: NO.
 
 GENERATE_PERLMOD       = NO
 
-# If the PERLMOD_LATEX tag is set to YES Doxygen will generate 
-# the necessary Makefile rules, Perl scripts and LaTeX code to be able 
-# to generate PDF and DVI output from the Perl module output.
+# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
+# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
+# output from the Perl module output.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
 
 PERLMOD_LATEX          = NO
 
-# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be 
-# nicely formatted so it can be parsed by a human reader.  This is useful 
-# if you want to understand what is going on.  On the other hand, if this 
-# tag is set to NO the size of the Perl module output will be much smaller 
-# and Perl will parse it just the same.
+# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
+# formatted so it can be parsed by a human reader. This is useful if you want to
+# understand what is going on. On the other hand, if this tag is set to NO, the
+# size of the Perl module output will be much smaller and Perl will parse it
+# just the same.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
 
 PERLMOD_PRETTY         = YES
 
-# The names of the make variables in the generated doxyrules.make file 
-# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. 
-# This is useful so different doxyrules.make files included by the same 
-# Makefile don't overwrite each other's variables.
+# The names of the make variables in the generated doxyrules.make file are
+# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
+# so different doxyrules.make files included by the same Makefile don't
+# overwrite each other's variables.
+# This tag requires that the tag GENERATE_PERLMOD is set to YES.
 
-PERLMOD_MAKEVAR_PREFIX = 
+PERLMOD_MAKEVAR_PREFIX =
 
 #---------------------------------------------------------------------------
 # Configuration options related to the preprocessor
 #---------------------------------------------------------------------------
 
-# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will 
-# evaluate all C-preprocessor directives found in the sources and include 
-# files.
+# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
+# C-preprocessor directives found in the sources and include files.
+# The default value is: YES.
 
 ENABLE_PREPROCESSING   = YES
 
-# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro 
-# names in the source code. If set to NO (the default) only conditional 
-# compilation will be performed. Macro expansion can be done in a controlled 
-# way by setting EXPAND_ONLY_PREDEF to YES.
+# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
+# in the source code. If set to NO, only conditional compilation will be
+# performed. Macro expansion can be done in a controlled way by setting
+# EXPAND_ONLY_PREDEF to YES.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
 MACRO_EXPANSION        = NO
 
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES 
-# then the macro expansion is limited to the macros specified with the 
-# PREDEFINED and EXPAND_AS_DEFINED tags.
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
+# the macro expansion is limited to the macros specified with the PREDEFINED and
+# EXPAND_AS_DEFINED tags.
+# The default value is: NO.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
 EXPAND_ONLY_PREDEF     = NO
 
-# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files 
-# pointed to by INCLUDE_PATH will be searched when a #include is found.
+# If the SEARCH_INCLUDES tag is set to YES, the include files in the
+# INCLUDE_PATH will be searched if a #include is found.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
 SEARCH_INCLUDES        = YES
 
-# The INCLUDE_PATH tag can be used to specify one or more directories that 
-# contain include files that are not input files but should be processed by 
-# the preprocessor.
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by the
+# preprocessor.
+# This tag requires that the tag SEARCH_INCLUDES is set to YES.
 
-INCLUDE_PATH           = 
+INCLUDE_PATH           =
 
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard 
-# patterns (like *.h and *.hpp) to filter out the header-files in the 
-# directories. If left blank, the patterns specified with FILE_PATTERNS will 
-# be used.
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will be
+# used.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
-INCLUDE_FILE_PATTERNS  = 
+INCLUDE_FILE_PATTERNS  =
 
-# The PREDEFINED tag can be used to specify one or more macro names that 
-# are defined before the preprocessor is started (similar to the -D option of 
-# gcc). The argument of the tag is a list of macros of the form: name 
-# or name=definition (no spaces). If the definition and the = are 
-# omitted =1 is assumed. To prevent a macro definition from being 
-# undefined via #undef or recursively expanded use the := operator 
-# instead of the = operator.
+# The PREDEFINED tag can be used to specify one or more macro names that are
+# defined before the preprocessor is started (similar to the -D option of e.g.
+# gcc). The argument of the tag is a list of macros of the form: name or
+# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
+# is assumed. To prevent a macro definition from being undefined via #undef or
+# recursively expanded use the := operator instead of the = operator.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
-PREDEFINED             = 
+PREDEFINED             =
 
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then 
-# this tag can be used to specify a list of macro names that should be expanded. 
-# The macro definition that is found in the sources will be used. 
-# Use the PREDEFINED tag if you want to use a different macro definition that 
-# overrules the definition found in the source code.
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
+# tag can be used to specify a list of macro names that should be expanded. The
+# macro definition that is found in the sources will be used. Use the PREDEFINED
+# tag if you want to use a different macro definition that overrules the
+# definition found in the source code.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
-EXPAND_AS_DEFINED      = 
+EXPAND_AS_DEFINED      =
 
-# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then 
-# doxygen's preprocessor will remove all references to function-like macros 
-# that are alone on a line, have an all uppercase name, and do not end with a 
-# semicolon, because these will confuse the parser if not removed.
+# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
+# remove all references to function-like macros that are alone on a line, have
+# an all uppercase name, and do not end with a semicolon. Such function macros
+# are typically used for boiler-plate code, and will confuse the parser if not
+# removed.
+# The default value is: YES.
+# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
 
 SKIP_FUNCTION_MACROS   = YES
 
 #---------------------------------------------------------------------------
-# Configuration::additions related to external references
+# Configuration options related to external references
 #---------------------------------------------------------------------------
 
-# The TAGFILES option can be used to specify one or more tagfiles. For each 
-# tag file the location of the external documentation should be added. The 
-# format of a tag file without this location is as follows: 
-#   TAGFILES = file1 file2 ... 
-# Adding location for the tag files is done as follows: 
-#   TAGFILES = file1=loc1 "file2 = loc2" ... 
-# where "loc1" and "loc2" can be relative or absolute paths 
-# or URLs. Note that each tag file must have a unique name (where the name does 
-# NOT include the path). If a tag file is not located in the directory in which 
-# doxygen is run, you must also specify the path to the tagfile here.
+# The TAGFILES tag can be used to specify one or more tag files. For each tag
+# file the location of the external documentation should be added. The format of
+# a tag file without this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where loc1 and loc2 can be relative or absolute paths or URLs. See the
+# section "Linking to external documentation" for more information about the use
+# of tag files.
+# Note: Each tag file must have a unique name (where the name does NOT include
+# the path). If a tag file is not located in the directory in which doxygen is
+# run, you must also specify the path to the tagfile here.
 
-TAGFILES               = 
+TAGFILES               =
 
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create 
-# a tag file that is based on the input files it reads.
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
+# tag file that is based on the input files it reads. See section "Linking to
+# external documentation" for more information about the usage of tag files.
 
-GENERATE_TAGFILE       = 
+GENERATE_TAGFILE       =
 
-# If the ALLEXTERNALS tag is set to YES all external classes will be listed 
-# in the class index. If set to NO only the inherited external classes 
-# will be listed.
+# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
+# the class index. If set to NO, only the inherited external classes will be
+# listed.
+# The default value is: NO.
 
 ALLEXTERNALS           = NO
 
-# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed 
-# in the modules index. If set to NO, only the current project's groups will 
-# be listed.
+# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will be
+# listed.
+# The default value is: YES.
 
 EXTERNAL_GROUPS        = YES
 
-# The PERL_PATH should be the absolute path and name of the perl script 
-# interpreter (i.e. the result of `which perl').
+# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
+# the related pages index. If set to NO, only the current project's pages will
+# be listed.
+# The default value is: YES.
+
+EXTERNAL_PAGES         = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of 'which perl').
+# The default file (with absolute path) is: /usr/bin/perl.
 
 PERL_PATH              = /usr/bin/perl
 
@@ -1605,222 +2223,317 @@ PERL_PATH              = /usr/bin/perl
 # Configuration options related to the dot tool
 #---------------------------------------------------------------------------
 
-# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will 
-# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base 
-# or super classes. Setting the tag to NO turns the diagrams off. Note that 
-# this option also works with HAVE_DOT disabled, but it is recommended to 
-# install and use dot, since it yields more powerful graphs.
+# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
+# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
+# NO turns the diagrams off. Note that this option also works with HAVE_DOT
+# disabled, but it is recommended to install and use dot, since it yields more
+# powerful graphs.
+# The default value is: YES.
 
 CLASS_DIAGRAMS         = YES
 
-# You can define message sequence charts within doxygen comments using the \msc 
-# command. Doxygen will then run the mscgen tool (see 
-# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the 
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where 
-# the mscgen tool resides. If left empty the tool is assumed to be found in the 
+# You can define message sequence charts within doxygen comments using the \msc
+# command. Doxygen will then run the mscgen tool (see:
+# http://www.mcternan.me.uk/mscgen/)) to produce the chart and insert it in the
+# documentation. The MSCGEN_PATH tag allows you to specify the directory where
+# the mscgen tool resides. If left empty the tool is assumed to be found in the
 # default search path.
 
-MSCGEN_PATH            = 
+MSCGEN_PATH            =
+
+# You can include diagrams made with dia in doxygen documentation. Doxygen will
+# then run dia to produce the diagram and insert it in the documentation. The
+# DIA_PATH tag allows you to specify the directory where the dia binary resides.
+# If left empty dia is assumed to be found in the default search path.
 
-# If set to YES, the inheritance and collaboration graphs will hide 
-# inheritance and usage relations if the target is undocumented 
-# or is not a class.
+DIA_PATH               =
+
+# If set to YES the inheritance and collaboration graphs will hide inheritance
+# and usage relations if the target is undocumented or is not a class.
+# The default value is: YES.
 
 HIDE_UNDOC_RELATIONS   = YES
 
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is 
-# available from the path. This tool is part of Graphviz, a graph visualization 
-# toolkit from AT&T and Lucent Bell Labs. The other options in this section 
-# have no effect if this option is set to NO (the default)
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz (see:
+# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
+# Bell Labs. The other options in this section have no effect if this option is
+# set to NO
+# The default value is: NO.
 
 HAVE_DOT               = NO
 
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is 
-# allowed to run in parallel. When set to 0 (the default) doxygen will 
-# base this on the number of processors available in the system. You can set it 
-# explicitly to a value larger than 0 to get control over the balance 
-# between CPU load and processing speed.
+# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
+# to run in parallel. When set to 0 doxygen will base this on the number of
+# processors available in the system. You can set it explicitly to a value
+# larger than 0 to get control over the balance between CPU load and processing
+# speed.
+# Minimum value: 0, maximum value: 32, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_NUM_THREADS        = 0
 
-# By default doxygen will use the Helvetica font for all dot files that 
-# doxygen generates. When you want a differently looking font you can specify 
-# the font name using DOT_FONTNAME. You need to make sure dot is able to find 
-# the font, which can be done by putting it in a standard location or by setting 
-# the DOTFONTPATH environment variable or by setting DOT_FONTPATH to the 
-# directory containing the font.
+# When you want a differently looking font in the dot files that doxygen
+# generates you can specify the font name using DOT_FONTNAME. You need to make
+# sure dot is able to find the font, which can be done by putting it in a
+# standard location or by setting the DOTFONTPATH environment variable or by
+# setting DOT_FONTPATH to the directory containing the font.
+# The default value is: Helvetica.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_FONTNAME           = Helvetica
 
-# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. 
-# The default size is 10pt.
+# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
+# dot graphs.
+# Minimum value: 4, maximum value: 24, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_FONTSIZE           = 10
 
-# By default doxygen will tell dot to use the Helvetica font. 
-# If you specify a different font using DOT_FONTNAME you can use DOT_FONTPATH to 
-# set the path where dot can find it.
+# By default doxygen will tell dot to use the default font as specified with
+# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
+# the path where dot can find it using this tag.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
-DOT_FONTPATH           = 
+DOT_FONTPATH           =
 
-# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen 
-# will generate a graph for each documented class showing the direct and 
-# indirect inheritance relations. Setting this tag to YES will force the 
-# CLASS_DIAGRAMS tag to NO.
+# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
+# each documented class showing the direct and indirect inheritance relations.
+# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 CLASS_GRAPH            = YES
 
-# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen 
-# will generate a graph for each documented class showing the direct and 
-# indirect implementation dependencies (inheritance, containment, and 
-# class references variables) of the class with other documented classes.
+# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
+# graph for each documented class showing the direct and indirect implementation
+# dependencies (inheritance, containment, and class references variables) of the
+# class with other documented classes.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 COLLABORATION_GRAPH    = YES
 
-# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen 
-# will generate a graph for groups, showing the direct groups dependencies
+# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
+# groups, showing the direct groups dependencies.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 GROUP_GRAPHS           = YES
 
-# If the UML_LOOK tag is set to YES doxygen will generate inheritance and 
-# collaboration diagrams in a style similar to the OMG's Unified Modeling 
+# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
 # Language.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 UML_LOOK               = NO
 
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside 
-# the class node. If there are many fields or methods and many nodes the 
-# graph may become too big to be useful. The UML_LIMIT_NUM_FIELDS 
-# threshold limits the number of items for each type to make the size more 
-# managable. Set this to 0 for no limit. Note that the threshold may be 
-# exceeded by 50% before the limit is enforced.
+# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
+# class node. If there are many fields or methods and many nodes the graph may
+# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
+# number of items for each type to make the size more manageable. Set this to 0
+# for no limit. Note that the threshold may be exceeded by 50% before the limit
+# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
+# but if the number exceeds 15, the total amount of fields shown is limited to
+# 10.
+# Minimum value: 0, maximum value: 100, default value: 10.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 UML_LIMIT_NUM_FIELDS   = 10
 
-# If set to YES, the inheritance and collaboration graphs will show the 
-# relations between templates and their instances.
+# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
+# collaboration graphs will show the relations between templates and their
+# instances.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 TEMPLATE_RELATIONS     = NO
 
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT 
-# tags are set to YES then doxygen will generate a graph for each documented 
-# file showing the direct and indirect include dependencies of the file with 
-# other documented files.
+# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
+# YES then doxygen will generate a graph for each documented file showing the
+# direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 INCLUDE_GRAPH          = YES
 
-# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and 
-# HAVE_DOT tags are set to YES then doxygen will generate a graph for each 
-# documented header file showing the documented files that directly or 
-# indirectly include this file.
+# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
+# set to YES then doxygen will generate a graph for each documented file showing
+# the direct and indirect include dependencies of the file with other documented
+# files.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 INCLUDED_BY_GRAPH      = YES
 
-# If the CALL_GRAPH and HAVE_DOT options are set to YES then 
-# doxygen will generate a call dependency graph for every global function 
-# or class method. Note that enabling this option will significantly increase 
-# the time of a run. So in most cases it will be better to enable call graphs 
-# for selected functions only using the \callgraph command.
+# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command. Disabling a call graph can be
+# accomplished by means of the command \hidecallgraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 CALL_GRAPH             = NO
 
-# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then 
-# doxygen will generate a caller dependency graph for every global function 
-# or class method. Note that enabling this option will significantly increase 
-# the time of a run. So in most cases it will be better to enable caller 
-# graphs for selected functions only using the \callergraph command.
+# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
+# dependency graph for every global function or class method.
+#
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable caller graphs for selected
+# functions only using the \callergraph command. Disabling a caller graph can be
+# accomplished by means of the command \hidecallergraph.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 CALLER_GRAPH           = NO
 
-# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen 
-# will generate a graphical hierarchy of all classes instead of a textual one.
+# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will graphical
+# hierarchy of all classes instead of a textual one.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 GRAPHICAL_HIERARCHY    = YES
 
-# If the DIRECTORY_GRAPH and HAVE_DOT tags are set to YES 
-# then doxygen will show the dependencies a directory has on other directories 
-# in a graphical way. The dependency relations are determined by the #include 
-# relations between the files in the directories.
+# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
+# dependencies a directory has on other directories in a graphical way. The
+# dependency relations are determined by the #include relations between the
+# files in the directories.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DIRECTORY_GRAPH        = YES
 
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images 
-# generated by dot. Possible values are svg, png, jpg, or gif. 
-# If left blank png will be used. If you choose svg you need to set 
-# HTML_FILE_EXTENSION to xhtml in order to make the SVG files 
-# visible in IE 9+ (other browsers do not have this requirement).
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. For an explanation of the image formats see the section
+# output formats in the documentation of the dot tool (Graphviz (see:
+# http://www.graphviz.org/)).
+# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
+# to make the SVG files visible in IE 9+ (other browsers do not have this
+# requirement).
+# Possible values are: png, jpg, gif, svg, png:gd, png:gd:gd, png:cairo,
+# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
+# png:gdiplus:gdiplus.
+# The default value is: png.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_IMAGE_FORMAT       = png
 
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to 
-# enable generation of interactive SVG images that allow zooming and panning. 
-# Note that this requires a modern browser other than Internet Explorer. 
-# Tested and working are Firefox, Chrome, Safari, and Opera. For IE 9+ you 
-# need to set HTML_FILE_EXTENSION to xhtml in order to make the SVG files 
-# visible. Older versions of IE do not have SVG support.
+# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
+# enable generation of interactive SVG images that allow zooming and panning.
+#
+# Note that this requires a modern browser other than Internet Explorer. Tested
+# and working are Firefox, Chrome, Safari, and Opera.
+# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
+# the SVG files visible. Older versions of IE do not have SVG support.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 INTERACTIVE_SVG        = NO
 
-# The tag DOT_PATH can be used to specify the path where the dot tool can be 
+# The DOT_PATH tag can be used to specify the path where the dot tool can be
 # found. If left blank, it is assumed the dot tool can be found in the path.
+# This tag requires that the tag HAVE_DOT is set to YES.
+
+DOT_PATH               =
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the \dotfile
+# command).
+# This tag requires that the tag HAVE_DOT is set to YES.
 
-DOT_PATH               = 
+DOTFILE_DIRS           =
 
-# The DOTFILE_DIRS tag can be used to specify one or more directories that 
-# contain dot files that are included in the documentation (see the 
-# \dotfile command).
+# The MSCFILE_DIRS tag can be used to specify one or more directories that
+# contain msc files that are included in the documentation (see the \mscfile
+# command).
 
-DOTFILE_DIRS           = 
+MSCFILE_DIRS           =
 
-# The MSCFILE_DIRS tag can be used to specify one or more directories that 
-# contain msc files that are included in the documentation (see the 
-# \mscfile command).
+# The DIAFILE_DIRS tag can be used to specify one or more directories that
+# contain dia files that are included in the documentation (see the \diafile
+# command).
 
-MSCFILE_DIRS           = 
+DIAFILE_DIRS           =
 
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of 
-# nodes that will be shown in the graph. If the number of nodes in a graph 
-# becomes larger than this value, doxygen will truncate the graph, which is 
-# visualized by representing a node as a red box. Note that doxygen if the 
-# number of direct children of the root node in a graph is already larger than 
-# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note 
-# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
+# path where java can find the plantuml.jar file. If left blank, it is assumed
+# PlantUML is not used or called during a preprocessing step. Doxygen will
+# generate a warning when it encounters a \startuml command in this case and
+# will not generate output for the diagram.
+
+PLANTUML_JAR_PATH      =
+
+# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
+# configuration file for plantuml.
+
+PLANTUML_CFG_FILE      =
+
+# When using plantuml, the specified paths are searched for files specified by
+# the !include statement in a plantuml block.
+
+PLANTUML_INCLUDE_PATH  =
+
+# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
+# that will be shown in the graph. If the number of nodes in a graph becomes
+# larger than this value, doxygen will truncate the graph, which is visualized
+# by representing a node as a red box. Note that doxygen if the number of direct
+# children of the root node in a graph is already larger than
+# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
+# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
+# Minimum value: 0, maximum value: 10000, default value: 50.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_GRAPH_MAX_NODES    = 50
 
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the 
-# graphs generated by dot. A depth value of 3 means that only nodes reachable 
-# from the root by following a path via at most 3 edges will be shown. Nodes 
-# that lay further from the root node will be omitted. Note that setting this 
-# option to 1 or 2 may greatly reduce the computation time needed for large 
-# code bases. Also note that the size of a graph can be further restricted by 
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
+# generated by dot. A depth value of 3 means that only nodes reachable from the
+# root by following a path via at most 3 edges will be shown. Nodes that lay
+# further from the root node will be omitted. Note that setting this option to 1
+# or 2 may greatly reduce the computation time needed for large code bases. Also
+# note that the size of a graph can be further restricted by
 # DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
+# Minimum value: 0, maximum value: 1000, default value: 0.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 MAX_DOT_GRAPH_DEPTH    = 0
 
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent 
-# background. This is disabled by default, because dot on Windows does not 
-# seem to support this out of the box. Warning: Depending on the platform used, 
-# enabling this option may lead to badly anti-aliased labels on the edges of 
-# a graph (i.e. they become hard to read).
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, because dot on Windows does not seem
+# to support this out of the box.
+#
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_TRANSPARENT        = NO
 
-# Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output 
-# files in one run (i.e. multiple -o and -T options on the command line). This 
-# makes dot run faster, but since only newer versions of dot (>1.8.10) 
-# support this, this feature is disabled by default.
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10) support
+# this, this feature is disabled by default.
+# The default value is: NO.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_MULTI_TARGETS      = NO
 
-# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will 
-# generate a legend page explaining the meaning of the various boxes and 
-# arrows in the dot generated graphs.
+# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
+# explaining the meaning of the various boxes and arrows in the dot generated
+# graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 GENERATE_LEGEND        = YES
 
-# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will 
-# remove the intermediate dot files that are used to generate 
-# the various graphs.
+# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
+# files that are used to generate the various graphs.
+# The default value is: YES.
+# This tag requires that the tag HAVE_DOT is set to YES.
 
 DOT_CLEANUP            = YES
diff --git a/resources/examples/testfiles/dft/cyclic.dft b/resources/examples/testfiles/dft/cyclic.dft
new file mode 100644
index 000000000..b683dcf42
--- /dev/null
+++ b/resources/examples/testfiles/dft/cyclic.dft
@@ -0,0 +1,5 @@
+toplevel "A";
+"A" and "B";
+"B" and "C";
+"C" and "A" "D";
+"D" lambda=0.5 dorm=0.3;
\ No newline at end of file
diff --git a/resources/examples/testfiles/dft/fdep_bound.dft b/resources/examples/testfiles/dft/fdep_bound.dft
new file mode 100644
index 000000000..4164d52a3
--- /dev/null
+++ b/resources/examples/testfiles/dft/fdep_bound.dft
@@ -0,0 +1,15 @@
+toplevel "A";
+"A" or "B" "C";
+"B" and "D" "L";
+"C" and "M" "N";
+"D" and "I" "J" "K";
+"DEP1" fdep "T" "I" "J" "K";
+"DEP2" fdep "D" "L";
+
+"I" lambda=0.5 dorm=0;
+"J" lambda=0.5 dorm=0.5;
+"K" lambda=0.5 dorm=0.5;
+"L" lambda=0.5 dorm=0.5;
+"M" lambda=0.5 dorm=0.5;
+"N" lambda=0.5 dorm=0.5;
+"T" lambda=0.5 dorm=0.5;
\ No newline at end of file
diff --git a/resources/examples/testfiles/dft/hecs_2_2.dft b/resources/examples/testfiles/dft/hecs_2_2.dft
new file mode 100644
index 000000000..788efaf16
--- /dev/null
+++ b/resources/examples/testfiles/dft/hecs_2_2.dft
@@ -0,0 +1,40 @@
+toplevel "System";
+"System" or "System_1" "System_2";
+"System_1" or "PSF_1" "MSF_1" "BSF_1" "IF_1";
+"PSF_1" and "P_11" "P_12";
+"P_11" wsp "A_11" "A_1S";
+"P_12" wsp "A_12" "A_1S";
+"MSF_1" 3of5 "M_1_1" "M_1_2" "M_1_3" "M_1_4" "M_1_5";
+"BSF_1" and "BUS_11" "BUS_12";
+"IF_1" or "HW_1" "SW_1";
+"System_2" or "PSF_2" "MSF_2" "BSF_2" "IF_2";
+"PSF_2" and "P_21" "P_22";
+"P_21" wsp "A_21" "A_2S";
+"P_22" wsp "A_22" "A_2S";
+"MSF_2" 3of5 "M_2_1" "M_2_2" "M_2_3" "M_2_4" "M_2_5";
+"BSF_2" and "BUS_21" "BUS_22";
+"IF_2" or "HW_2" "SW_2";
+"A_11" lambda=1.0e-4 dorm=0;
+"A_12" lambda=1.0e-4 dorm=0;
+"A_1S" lambda=1.0e-4 dorm=0;
+"M_1_1" lambda=6.0e-5 dorm=0;
+"M_1_2" lambda=6.0e-5 dorm=0;
+"M_1_3" lambda=6.0e-5 dorm=0;
+"M_1_4" lambda=6.0e-5 dorm=0;
+"M_1_5" lambda=6.0e-5 dorm=0;
+"BUS_11" lambda=1.0e-6 dorm=0;
+"BUS_12" lambda=1.0e-6 dorm=0;
+"HW_1" lambda=5.0e-5 dorm=0;
+"SW_1" lambda=6.0e-5 dorm=0;
+"A_21" lambda=1.0e-4 dorm=0;
+"A_22" lambda=1.0e-4 dorm=0;
+"A_2S" lambda=1.0e-4 dorm=0;
+"M_2_1" lambda=6.0e-5 dorm=0;
+"M_2_2" lambda=6.0e-5 dorm=0;
+"M_2_3" lambda=6.0e-5 dorm=0;
+"M_2_4" lambda=6.0e-5 dorm=0;
+"M_2_5" lambda=6.0e-5 dorm=0;
+"BUS_21" lambda=1.0e-6 dorm=0;
+"BUS_22" lambda=1.0e-6 dorm=0;
+"HW_2" lambda=5.0e-5 dorm=0;
+"SW_2" lambda=6.0e-5 dorm=0;
diff --git a/resources/examples/testfiles/dft/seqChild.dft b/resources/examples/testfiles/dft/seqChild.dft
new file mode 100644
index 000000000..ad68072c8
--- /dev/null
+++ b/resources/examples/testfiles/dft/seqChild.dft
@@ -0,0 +1,5 @@
+toplevel "A";
+"A" and "B" "X";
+"X" seq "B" "C";
+"B" lambda=0.5 dorm=0.3;
+"C" lambda=0.5 dorm=0.3;
diff --git a/resources/examples/testfiles/dft/spare_two_modules.dft b/resources/examples/testfiles/dft/spare_two_modules.dft
new file mode 100755
index 000000000..a2cbcbe8b
--- /dev/null
+++ b/resources/examples/testfiles/dft/spare_two_modules.dft
@@ -0,0 +1,8 @@
+toplevel "A";
+"A" or "B" "C";
+"B" wsp "K" "J" "I";
+"C" wsp "L" "J" "I";
+"I" lambda=0.5 dorm=0.5;
+"J" lambda=1 dorm=0.5;
+"K" lambda=0.5 dorm=0.5;
+"L" lambda=0.5 dorm=0.5;
diff --git a/resources/examples/testfiles/dft/symmetry6.dft b/resources/examples/testfiles/dft/symmetry6.dft
new file mode 100644
index 000000000..9fa6a837d
--- /dev/null
+++ b/resources/examples/testfiles/dft/symmetry6.dft
@@ -0,0 +1,24 @@
+toplevel "A";
+"A" or "B" "C";
+"B" and "J" "K" "L";
+"J" or "J1" "J2";
+"K" or "K1" "K2";
+"L" or "L1" "L2";
+"C" or "M" "N";
+"M" and "M1" "M2" "M3" "M4";
+"N" and "N1" "N2" "N3" "N4";
+"J1" lambda=0.5 dorm=0;
+"J2" lambda=0.5 dorm=0;
+"K1" lambda=0.5 dorm=0;
+"K2" lambda=0.5 dorm=0;
+"L1" lambda=0.5 dorm=0;
+"L2" lambda=0.5 dorm=0;
+"M1" lambda=0.5 dorm=0;
+"M2" lambda=0.5 dorm=0;
+"M3" lambda=1 dorm=0;
+"M4" lambda=1 dorm=0;
+"N1" lambda=0.5 dorm=0;
+"N2" lambda=0.5 dorm=0;
+"N3" lambda=1 dorm=0;
+"N4" lambda=1 dorm=0;
+
diff --git a/src/storm-dft-cli/storm-dft.cpp b/src/storm-dft-cli/storm-dft.cpp
index 533da8bf3..591f2ee2b 100644
--- a/src/storm-dft-cli/storm-dft.cpp
+++ b/src/storm-dft-cli/storm-dft.cpp
@@ -51,6 +51,12 @@ void processOptions() {
         storm::api::exportDFTToJsonFile<ValueType>(*dft, dftIOSettings.getExportJsonFilename());
     }
 
+    if (dftIOSettings.isExportToSmt()) {
+        // Export to json
+        storm::api::exportDFTToSMT<ValueType>(*dft, dftIOSettings.getExportSmtFilename());
+        return;
+    }
+
     // Check well-formedness of DFT
     std::stringstream stream;
     if (!dft->checkWellFormedness(stream)) {
@@ -76,8 +82,7 @@ void processOptions() {
     if (faultTreeSettings.solveWithSMT()) {
         // Solve with SMT
         STORM_LOG_DEBUG("Running DFT analysis with use of SMT");
-        storm::api::exportDFTToSMT(*dft, "test.smt2");
-        STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Only exported to SMT file 'test.smt2' but analysis is not supported.");
+        storm::api::analyzeDFTSMT(*dft, true);
         return;
     }
 #endif
@@ -148,8 +153,8 @@ void processOptions() {
     // Add relevant event names from properties
     for (auto atomic : atomicLabels) {
         std::string label = atomic->getLabel();
-        if (label == "failed") {
-            // Ignore as this label will always be added
+        if (label == "failed" or label == "skipped") {
+            // Ignore as these label will always be added if necessary
         } else {
             // Get name of event
             if (boost::ends_with(label, "_failed")) {
diff --git a/src/storm-dft/api/storm-dft.cpp b/src/storm-dft/api/storm-dft.cpp
index bae04d1e0..623d75a16 100644
--- a/src/storm-dft/api/storm-dft.cpp
+++ b/src/storm-dft/api/storm-dft.cpp
@@ -42,6 +42,39 @@ namespace storm {
             STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "Export to SMT does not support this data type.");
         }
 
+        template<>
+        std::vector<storm::solver::SmtSolver::CheckResult>
+        analyzeDFTSMT(storm::storage::DFT<double> const &dft, bool printOutput) {
+            storm::modelchecker::DFTASFChecker smtChecker(dft);
+            smtChecker.toSolver();
+            std::vector<storm::solver::SmtSolver::CheckResult> results;
+
+            results.push_back(smtChecker.checkTleNeverFailed());
+            uint64_t lower_bound = smtChecker.getLeastFailureBound();
+            uint64_t upper_bound = smtChecker.getAlwaysFailedBound();
+            if (printOutput) {
+                // TODO add suitable output function, maybe add query descriptions for better readability
+                for (size_t i = 0; i < results.size(); ++i) {
+                    std::string tmp = "unknown";
+                    if (results.at(i) == storm::solver::SmtSolver::CheckResult::Sat) {
+                        tmp = "SAT";
+                    } else if (results.at(i) == storm::solver::SmtSolver::CheckResult::Unsat) {
+                        tmp = "UNSAT";
+                    }
+                }
+                std::cout << "Lower bound: " << std::to_string(lower_bound) << std::endl;
+                std::cout << "Upper bound: " << std::to_string(upper_bound) << std::endl;
+            }
+            return results;
+        }
+
+        template<>
+        std::vector<storm::solver::SmtSolver::CheckResult>
+        analyzeDFTSMT(storm::storage::DFT<storm::RationalFunction> const &dft, bool printOutput) {
+            STORM_LOG_THROW(false, storm::exceptions::NotSupportedException,
+                            "Analysis by SMT not supported for this data type.");
+        }
+
         template<>
         std::pair<std::shared_ptr<storm::gspn::GSPN>, uint64_t> transformToGSPN(storm::storage::DFT<double> const& dft) {
             storm::settings::modules::FaultTreeSettings const& ftSettings = storm::settings::getModule<storm::settings::modules::FaultTreeSettings>();
diff --git a/src/storm-dft/api/storm-dft.h b/src/storm-dft/api/storm-dft.h
index 754bdfc91..0388e9610 100644
--- a/src/storm-dft/api/storm-dft.h
+++ b/src/storm-dft/api/storm-dft.h
@@ -92,6 +92,17 @@ namespace storm {
             return results;
         }
 
+        /*!
+         * Analyze the DFT using the SMT encoding
+         *
+         * @param dft DFT.
+         *
+         * @return Result result vector
+         */
+        template<typename ValueType>
+        std::vector<storm::solver::SmtSolver::CheckResult>
+        analyzeDFTSMT(storm::storage::DFT<ValueType> const &dft, bool printOutput);
+
         /*!
          * Export DFT to JSON file.
          *
diff --git a/src/storm-dft/builder/DFTBuilder.cpp b/src/storm-dft/builder/DFTBuilder.cpp
index fce5d7962..9bb40a8b6 100644
--- a/src/storm-dft/builder/DFTBuilder.cpp
+++ b/src/storm-dft/builder/DFTBuilder.cpp
@@ -27,6 +27,9 @@ namespace storm {
                     if (itFind != mElements.end()) {
                         // Child found
                         DFTElementPointer childElement = itFind->second;
+                        STORM_LOG_THROW(!childElement->isRestriction(), storm::exceptions::WrongFormatException,
+                                        "Restictor " << childElement->name() << " is not allowed as child of gate "
+                                                     << gate->name());
                         if(!childElement->isDependency()) {
                             gate->pushBackChild(childElement);
                             childElement->addParent(gate);
@@ -81,14 +84,13 @@ namespace storm {
                 }
                 
             }
-            
 
             // Sort elements topologically
+            DFTElementVector elems = topoSort();
             // compute rank
-            for (auto& elem : mElements) {
+            for (auto &elem : mElements) {
                 computeRank(elem.second);
             }
-            DFTElementVector elems = topoSort();
             // Set ids
             size_t id = 0;
             for(DFTElementPointer e : elems) {
diff --git a/src/storm-dft/builder/DftExplorationHeuristic.h b/src/storm-dft/builder/DftExplorationHeuristic.h
index b0fd73a66..f71f60dcc 100644
--- a/src/storm-dft/builder/DftExplorationHeuristic.h
+++ b/src/storm-dft/builder/DftExplorationHeuristic.h
@@ -22,7 +22,7 @@ namespace storm {
         class DFTExplorationHeuristic {
 
         public:
-            explicit DFTExplorationHeuristic(size_t id) : id(id), expand(true), lowerBound(storm::utility::zero<ValueType>()), upperBound(storm::utility::infinity<ValueType>()), depth(0), probability(storm::utility::one<ValueType>()) {
+            explicit DFTExplorationHeuristic(size_t id) : id(id), expand(false), lowerBound(storm::utility::zero<ValueType>()), upperBound(storm::utility::infinity<ValueType>()), depth(0), probability(storm::utility::one<ValueType>()) {
                 // Intentionally left empty
             }
 
diff --git a/src/storm-dft/builder/ExplicitDFTModelBuilder.cpp b/src/storm-dft/builder/ExplicitDFTModelBuilder.cpp
index ca9415e48..e71f8e4b9 100644
--- a/src/storm-dft/builder/ExplicitDFTModelBuilder.cpp
+++ b/src/storm-dft/builder/ExplicitDFTModelBuilder.cpp
@@ -202,6 +202,13 @@ namespace storm {
                         STORM_LOG_THROW(false, storm::exceptions::IllegalArgumentException, "Heuristic not known.");
                 }
             }
+
+            auto ftSettings = storm::settings::getModule<storm::settings::modules::FaultTreeSettings>();
+            if (ftSettings.isMaxDepthSet()) {
+                STORM_LOG_ASSERT(usedHeuristic == storm::builder::ApproximationHeuristic::DEPTH, "MaxDepth requires 'depth' exploration heuristic.");
+                approximationThreshold = ftSettings.getMaxDepth();
+            }
+
             exploreStateSpace(approximationThreshold);
 
             size_t stateSize = stateStorage.getNumberOfStates() + (this->uniqueFailedState ? 1 : 0);
@@ -376,7 +383,7 @@ namespace storm {
                     setMarkovian(true);
                     // Add transition to target state with temporary value 0
                     // TODO: what to do when there is no unique target state?
-                    STORM_LOG_ASSERT(this->uniqueFailedState, "Approximation only works with unique failed state");
+                    //STORM_LOG_ASSERT(this->uniqueFailedState, "Approximation only works with unique failed state");
                     matrixBuilder.addTransition(0, storm::utility::zero<ValueType>());
                     // Remember skipped state
                     skippedStates[matrixBuilder.getCurrentRowGroup() - 1] = std::make_pair(currentState, currentExplorationHeuristic);
@@ -418,8 +425,9 @@ namespace storm {
                                     }
 
                                     iter->second.second = heuristic;
-                                    if (state->hasFailed(dft.getTopLevelIndex()) || state->isFailsafe(dft.getTopLevelIndex()) || state->getFailableElements().hasDependencies() || (!state->getFailableElements().hasDependencies() && !state->getFailableElements().hasBEs())) {
-                                        // Do not skip absorbing state or if reached by dependencies
+                                    //if (state->hasFailed(dft.getTopLevelIndex()) || state->isFailsafe(dft.getTopLevelIndex()) || state->getFailableElements().hasDependencies() || (!state->getFailableElements().hasDependencies() && !state->getFailableElements().hasBEs())) {
+                                    if (state->getFailableElements().hasDependencies() || (!state->getFailableElements().hasDependencies() && !state->getFailableElements().hasBEs())) {
+                                            // Do not skip absorbing state or if reached by dependencies
                                         iter->second.second->markExpand();
                                     }
                                     if (usedHeuristic == storm::builder::ApproximationHeuristic::BOUNDDIFFERENCE) {
@@ -519,7 +527,16 @@ namespace storm {
 
         template<typename ValueType, typename StateType>
         std::shared_ptr<storm::models::sparse::Model<ValueType>> ExplicitDFTModelBuilder<ValueType, StateType>::getModel() {
-            STORM_LOG_ASSERT(skippedStates.size() == 0, "Concrete model has skipped states");
+            if (storm::settings::getModule<storm::settings::modules::FaultTreeSettings>().isMaxDepthSet() && skippedStates.size() > 0) {
+                // Give skipped states separate label "skipped"
+                modelComponents.stateLabeling.addLabel("skipped");
+                for (auto it = skippedStates.begin(); it != skippedStates.end(); ++it) {
+                    modelComponents.stateLabeling.addLabelToState("skipped", it->first);
+                }
+            } else{
+                STORM_LOG_ASSERT(skippedStates.size() == 0, "Concrete model has skipped states");
+            }
+
             return createModel(false);
         }
 
diff --git a/src/storm-dft/generator/DftNextStateGenerator.cpp b/src/storm-dft/generator/DftNextStateGenerator.cpp
index 4edd26b4b..f4af34ed1 100644
--- a/src/storm-dft/generator/DftNextStateGenerator.cpp
+++ b/src/storm-dft/generator/DftNextStateGenerator.cpp
@@ -78,6 +78,7 @@ namespace storm {
             // Let BE fail
             bool isFirst = true;
             while (!state->getFailableElements().isEnd()) {
+                //TODO outside
                 if (storm::settings::getModule<storm::settings::modules::FaultTreeSettings>().isTakeFirstDependency() && exploreDependencies && !isFirst) {
                     // We discard further exploration as we already chose one dependent event
                     break;
diff --git a/src/storm-dft/modelchecker/dft/DFTASFChecker.cpp b/src/storm-dft/modelchecker/dft/DFTASFChecker.cpp
index 1b9a2ea89..b43edf72b 100644
--- a/src/storm-dft/modelchecker/dft/DFTASFChecker.cpp
+++ b/src/storm-dft/modelchecker/dft/DFTASFChecker.cpp
@@ -1,405 +1,20 @@
 #include "DFTASFChecker.h"
+#include "SmtConstraint.cpp"
 #include <string>
+
 #include "storm/utility/file.h"
 #include "storm/utility/bitoperations.h"
+#include "storm-parsers/parser/ExpressionCreator.h"
+#include "storm/solver/SmtSolver.h"
+#include "storm/storage/expressions/ExpressionManager.h"
+#include "storm/storage/expressions/Type.h"
 #include "storm/exceptions/NotImplementedException.h"
 #include "storm/exceptions/NotSupportedException.h"
 
 namespace storm {
-    
-    namespace modelchecker {
-        
-        /*
-         * Variable[VarIndex] is the maximum of the others
-         */
-        class IsMaximum : public DFTConstraint {
-        public:
-            IsMaximum(uint64_t varIndex, std::vector<uint64_t> const& varIndices) : varIndex(varIndex), varIndices(varIndices) {
-            }
-            
-            virtual ~IsMaximum() {
-            }
-            
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(and ";
-                // assert it is largereq than all values.
-                for (auto const& ovi : varIndices) {
-                    sstr << "(>= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
-                }
-                // assert it is one of the values.
-                sstr << "(or ";
-                for (auto const& ovi : varIndices) {
-                    sstr << "(= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
-                }
-                sstr << ")"; // end of the or
-                sstr << ")"; // end outer and.
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            std::vector<uint64_t> varIndices;
-        };
-
-
-        /*
-         * First is the minimum of the others
-         */
-        class IsMinimum : public DFTConstraint {
-        public:
-            IsMinimum(uint64_t varIndex, std::vector<uint64_t> const& varIndices) : varIndex(varIndex), varIndices(varIndices) {
-            }
-
-            virtual ~IsMinimum() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(and ";
-                // assert it is smallereq than all values.
-                for (auto const& ovi : varIndices) {
-                    sstr << "(<= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
-                }
-                // assert it is one of the values.
-                sstr << "(or ";
-                for (auto const& ovi : varIndices) {
-                    sstr << "(= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
-                }
-                sstr << ")"; // end of the or
-                sstr << ")"; // end outer and.
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            std::vector<uint64_t> varIndices;
-        };
-                
-
-        class BetweenValues : public DFTConstraint {
-        public:
-            BetweenValues(uint64_t varIndex, uint64_t lower, uint64_t upper) : varIndex(varIndex), upperBound(upper) , lowerBound(lower) {
-            }
-            virtual ~BetweenValues() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(and ";
-                sstr << "(>= " << varNames.at(varIndex) << " " << lowerBound << ")";
-                sstr << "(<= " << varNames.at(varIndex) << " " << upperBound << ")";
-                sstr << ")";
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            uint64_t upperBound;
-            uint64_t lowerBound;
-        };
-
-
-        class And : public DFTConstraint {
-        public:
-            And(std::vector<std::shared_ptr<DFTConstraint>> const& constraints) : constraints(constraints) {
-            }
-            virtual ~And() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                if (constraints.empty()) {
-                    sstr << "true";
-                } else {
-                    sstr << "(and";
-                    for(auto const& c : constraints) {
-                        sstr << " " << c->toSmtlib2(varNames);
-                    }
-                    sstr << ")";
-                }
-                return sstr.str();
-            }
-
-        private:
-            std::vector<std::shared_ptr<DFTConstraint>> constraints;
-
-        };
-
-
-        class Or : public DFTConstraint {
-        public:
-            Or(std::vector<std::shared_ptr<DFTConstraint>> const& constraints) : constraints(constraints) {
-            }
 
-            virtual ~Or() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                if (constraints.empty()) {
-                    sstr << "false";
-                } else {
-                    sstr << "(or";
-                    for(auto const& c : constraints) {
-                        sstr << " " << c->toSmtlib2(varNames);
-                    }
-                    sstr << ")";
-                }
-                return sstr.str();
-            }
-
-        private:
-            std::vector<std::shared_ptr<DFTConstraint>> constraints;
-
-        };
-
-
-        class Implies : public DFTConstraint {
-        public:
-            Implies(std::shared_ptr<DFTConstraint> l, std::shared_ptr<DFTConstraint> r) : lhs(l), rhs(r) {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(=> " << lhs->toSmtlib2(varNames) << " " << rhs->toSmtlib2(varNames) << ")";
-                return sstr.str();
-            }
-
-        private:
-            std::shared_ptr<DFTConstraint> lhs;
-            std::shared_ptr<DFTConstraint> rhs;
-        };
-
-
-        class Iff : public DFTConstraint {
-        public:
-            Iff(std::shared_ptr<DFTConstraint> l, std::shared_ptr<DFTConstraint> r) : lhs(l), rhs(r) {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(= " << lhs->toSmtlib2(varNames) << " " << rhs->toSmtlib2(varNames) << ")";
-                return sstr.str();
-            }
-
-        private:
-            std::shared_ptr<DFTConstraint> lhs;
-            std::shared_ptr<DFTConstraint> rhs;
-        };
-
-
-        class IsTrue : public DFTConstraint {
-        public:
-            IsTrue(bool val) :value(val) {
-            }
-
-            virtual ~IsTrue() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << (value ? "true" : "false");
-                return sstr.str();
-            }
-
-        private:
-            bool value;
-        };
-
-
-        class IsBoolValue : public DFTConstraint {
-        public:
-            IsBoolValue(uint64_t varIndex, bool val) : varIndex(varIndex), value(val) {
-            }
-
-            virtual ~IsBoolValue() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                assert(varIndex < varNames.size());
-                if (value) {
-                    sstr << varNames.at(varIndex);
-                } else {
-                    sstr << "(not " << varNames.at(varIndex) << ")";
-                }
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            bool value;
-        };
-
-
-        class IsConstantValue : public DFTConstraint {
-        public:
-            IsConstantValue(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) {
-            }
-
-            virtual ~IsConstantValue() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                assert(varIndex < varNames.size());
-                sstr << "(= " << varNames.at(varIndex) << " " << value << ")";
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            uint64_t value;
-        };
-
-
-        class IsLessConstant : public DFTConstraint {
-        public:
-            IsLessConstant(uint64_t varIndex, uint64_t val) :varIndex(varIndex), value(val) {
-            }
-
-            virtual ~IsLessConstant() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                assert(varIndex < varNames.size());
-                sstr << "(< " << varNames.at(varIndex) << " " << value << ")";
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            uint64_t value;
-        };
-
-        class IsLessEqualConstant : public DFTConstraint {
-        public:
-            IsLessEqualConstant(uint64_t varIndex, uint64_t val) :varIndex(varIndex), value(val) {
-            }
-
-            virtual ~IsLessEqualConstant() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                assert(varIndex < varNames.size());
-                sstr << "(<= " << varNames.at(varIndex) << " " << value << ")";
-                return sstr.str();
-            }
-
-        private:
-            uint64_t varIndex;
-            uint64_t value;
-        };
-
-
-        class IsEqual : public DFTConstraint {
-        public:
-            IsEqual(uint64_t varIndex1, uint64_t varIndex2) :var1Index(varIndex1), var2Index(varIndex2) {
-            }
-
-            virtual ~IsEqual() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                return "(= " + varNames.at(var1Index) + " " +  varNames.at(var2Index) + ")";
-            }
-
-        private:
-            uint64_t var1Index;
-            uint64_t var2Index;
-        };
-
-
-        class IsLess : public DFTConstraint {
-        public:
-            IsLess(uint64_t varIndex1, uint64_t varIndex2) :var1Index(varIndex1), var2Index(varIndex2) {
-            }
-
-            virtual ~IsLess() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                return "(< " + varNames.at(var1Index) + " " +  varNames.at(var2Index) + ")";
-            }
-
-        private:
-            uint64_t var1Index;
-            uint64_t var2Index;
-        };
-
-
-        class PairwiseDifferent : public DFTConstraint {
-        public:
-            PairwiseDifferent(std::vector<uint64_t> const& indices) : varIndices(indices) {
-            }
-            virtual ~PairwiseDifferent() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(distinct";
-                //                for(uint64_t i = 0; i < varIndices.size(); ++i) {
-                //                    for(uint64_t j = i + 1; j < varIndices.size(); ++j) {
-                //                        sstr << "()";
-                //                    }
-                //                }
-                for (auto const& varIndex : varIndices) {
-                    sstr << " " << varNames.at(varIndex);
-                }
-                sstr << ")";
-                return sstr.str();
-            }
-
-        private:
-            std::vector<uint64_t> varIndices;
-        };
-
-
-        class Sorted : public DFTConstraint {
-        public:
-            Sorted(std::vector<uint64_t> varIndices) : varIndices(varIndices) {
-            }
-
-            virtual ~Sorted() {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(and ";
-                for(uint64_t i = 1; i < varIndices.size(); ++i) {
-                    sstr << "(<= " << varNames.at(varIndices.at(i-1)) << " " << varNames.at(varIndices.at(i)) << ")";
-                }
-                sstr << ") ";
-                return sstr.str();
-            }
-
-        private:
-            std::vector<uint64_t> varIndices;
-        };
-
-
-        class IfThenElse : public DFTConstraint {
-        public:
-            IfThenElse(std::shared_ptr<DFTConstraint> ifC, std::shared_ptr<DFTConstraint> thenC, std::shared_ptr<DFTConstraint> elseC) : ifConstraint(ifC), thenConstraint(thenC), elseConstraint(elseC) {
-            }
-
-            std::string toSmtlib2(std::vector<std::string> const& varNames) const override {
-                std::stringstream sstr;
-                sstr << "(ite " << ifConstraint->toSmtlib2(varNames) << " " << thenConstraint->toSmtlib2(varNames) << " " << elseConstraint->toSmtlib2(varNames) << ")";
-                return sstr.str();
-            }
-
-        private:
-            std::shared_ptr<DFTConstraint> ifConstraint;
-            std::shared_ptr<DFTConstraint> thenConstraint;
-            std::shared_ptr<DFTConstraint> elseConstraint;
-        };
-
-                
-        DFTASFChecker::DFTASFChecker(storm::storage::DFT<double> const& dft) : dft(dft) {
+    namespace modelchecker {
+        DFTASFChecker::DFTASFChecker(storm::storage::DFT<ValueType> const &dft) : dft(dft) {
             // Intentionally left empty.
         }
 
@@ -409,7 +24,7 @@ namespace storm {
 
         void DFTASFChecker::convert() {
             std::vector<uint64_t> beVariables;
-            notFailed = dft.nrBasicElements()+1; // Value indicating the element is not failed
+            notFailed = dft.nrBasicElements() + 1; // Value indicating the element is not failed
 
             // Initialize variables
             for (size_t i = 0; i < dft.nrElements(); ++i) {
@@ -426,12 +41,18 @@ namespace storm {
                     case storm::storage::DFTElementType::SPARE:
                     {
                         auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element);
-                        for( auto const& spareChild : spare->children()) {
+                        for (auto const &spareChild : spare->children()) {
                             varNames.push_back("c_" + element->name() + "_" + spareChild->name());
-                            claimVariables.emplace(SpareAndChildPair(element->id(), spareChild->id()), varNames.size() - 1);
+                            claimVariables.emplace(SpareAndChildPair(element->id(), spareChild->id()),
+                                                   varNames.size() - 1);
                         }
                         break;
                     }
+                    case storm::storage::DFTElementType::PDEP: {
+                        varNames.push_back("dep_" + element->name());
+                        dependencyVariables.emplace(element->id(), varNames.size() - 1);
+                        break;
+                    }
                     default:
                         break;
                 }
@@ -446,7 +67,7 @@ namespace storm {
             // Generate constraints
 
             // All BEs have to fail (first part of constraint 12)
-            for (auto const& beV : beVariables) {
+            for (auto const &beV : beVariables) {
                 constraints.push_back(std::make_shared<BetweenValues>(beV, 1, dft.nrBasicElements()));
             }
 
@@ -455,7 +76,7 @@ namespace storm {
             constraints.back()->setDescription("No two BEs fail at the same time");
 
             // Initialize claim variables in [1, |BE|+1]
-            for (auto const& claimVariable : claimVariables) {
+            for (auto const &claimVariable : claimVariables) {
                 constraints.push_back(std::make_shared<BetweenValues>(claimVariable.second, 0, notFailed));
             }
 
@@ -468,251 +89,340 @@ namespace storm {
                 std::vector<uint64_t> childVarIndices;
                 if (element->isGate()) {
                     std::shared_ptr<storm::storage::DFTGate<ValueType> const> gate = dft.getGate(i);
-                    for (auto const& child : gate->children()) {
+                    for (auto const &child : gate->children()) {
                         childVarIndices.push_back(timePointVariables.at(child->id()));
                     }
                 }
 
                 switch (element->type()) {
-                    case storm::storage::DFTElementType::BE:
+                    case storm::storage::DFTElementType::BE_EXP:
+                    case storm::storage::DFTElementType::BE_CONST:
                         // BEs were already considered before
                         break;
                     case storm::storage::DFTElementType::AND:
-                        // Constraint for AND gate (constraint 1)
-                        constraints.push_back(std::make_shared<IsMaximum>(timePointVariables.at(i), childVarIndices));
-                        constraints.back()->setDescription("AND gate " + element->name());
+                        generateAndConstraint(i, childVarIndices, element);
                         break;
                     case storm::storage::DFTElementType::OR:
-                        // Constraint for OR gate (constraint 2)
-                        constraints.push_back(std::make_shared<IsMinimum>(timePointVariables.at(i), childVarIndices));
-                        constraints.back()->setDescription("OR gate " + element->name());
+                        generateOrConstraint(i, childVarIndices, element);
                         break;
                     case storm::storage::DFTElementType::VOT:
-                    {
-                        // VOTs are implemented via OR over ANDs with all possible combinations
-                        auto vot = std::static_pointer_cast<storm::storage::DFTVot<double> const>(element);
-                        std::vector<uint64_t> tmpVars;
-                        size_t k = 0;
-                        // Generate all permutations of k out of n
-                        size_t combination = smallestIntWithNBitsSet(static_cast<size_t>(vot->threshold()));
-                        do {
-                            // Construct selected children from combination
-                            std::vector<uint64_t> combinationChildren;
-                            for (size_t j = 0; j < vot->nrChildren(); ++j) {
-                                if (combination & (1 << j)) {
-                                    combinationChildren.push_back(childVarIndices.at(j));
-                                }
-                            }
-                            // Introduce temporary variable for this AND
-                            varNames.push_back("v_" + vot->name() + "_" + std::to_string(k));
-                            size_t index = varNames.size() - 1;
-                            tmpVars.push_back(index);
-                            tmpTimePointVariables.push_back(index);
-                            // AND over the selected children
-                            constraints.push_back(std::make_shared<IsMaximum>(index, combinationChildren));
-                            constraints.back()->setDescription("VOT gate " + element->name() + ": AND no. " + std::to_string(k));
-                            // Generate next permutation
-                            combination = nextBitPermutation(combination);
-                            ++k;
-                        } while(combination < (1 << vot->nrChildren()) && combination != 0);
-
-                        // Constraint is OR over all possible combinations
-                        constraints.push_back(std::make_shared<IsMinimum>(timePointVariables.at(i), tmpVars));
-                        constraints.back()->setDescription("VOT gate " + element->name() + ": OR");
+                        generateVotConstraint(i, childVarIndices, element);
                         break;
-                    }
                     case storm::storage::DFTElementType::PAND:
-                    {
-                        // Constraint for PAND gate (constraint 3)
-                        std::shared_ptr<DFTConstraint> ifC = std::make_shared<Sorted>(childVarIndices);
-                        std::shared_ptr<DFTConstraint> thenC = std::make_shared<IsEqual>(timePointVariables.at(i), childVarIndices.back());
-                        std::shared_ptr<DFTConstraint> elseC = std::make_shared<IsConstantValue>(timePointVariables.at(i), notFailed);
-                        constraints.push_back(std::make_shared<IfThenElse>(ifC, thenC, elseC));
-                        constraints.back()->setDescription("PAND gate " + element->name());
+                        generatePandConstraint(i, childVarIndices, element);
                         break;
-                    }
                     case storm::storage::DFTElementType::POR:
-                    {
-                        // Constraint for POR gate
-                        // First child fails before all others
-                        std::vector<std::shared_ptr<DFTConstraint>> firstSmallestC;
-                        uint64_t timeFirstChild = childVarIndices.front();
-                        for (uint64_t i = 1; i < childVarIndices.size(); ++i) {
-                            firstSmallestC.push_back(std::make_shared<IsLess>(timeFirstChild, childVarIndices.at(i)));
-                        }
-                        std::shared_ptr<DFTConstraint> ifC = std::make_shared<And>(firstSmallestC);
-                        std::shared_ptr<DFTConstraint> thenC = std::make_shared<IsEqual>(timePointVariables.at(i), childVarIndices.front());
-                        std::shared_ptr<DFTConstraint> elseC = std::make_shared<IsConstantValue>(timePointVariables.at(i), notFailed);
-                        constraints.push_back(std::make_shared<IfThenElse>(ifC, thenC, elseC));
-                        constraints.back()->setDescription("POR gate " + element->name());
+                        generatePorConstraint(i, childVarIndices, element);
                         break;
-                    }
                     case storm::storage::DFTElementType::SEQ:
-                    {
-                        // Constraint for SEQ gate (constraint 4)
-                        // As the restriction is not a gate we have to enumerate its children here
-                        auto seq = std::static_pointer_cast<storm::storage::DFTRestriction<double> const>(element);
-                        for (auto const& child : seq->children()) {
-                            childVarIndices.push_back(timePointVariables.at(child->id()));
-                        }
-
-                        constraints.push_back(std::make_shared<Sorted>(childVarIndices));
-                        constraints.back()->setDescription("SEQ gate " + element->name());
+                        generateSeqConstraint(childVarIndices, element);
                         break;
-                    }
                     case storm::storage::DFTElementType::SPARE:
-                    {
-                        auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element);
-                        auto const& children = spare->children();
-                        uint64_t firstChild = children.front()->id();
-                        uint64_t lastChild = children.back()->id();
-
-                        // First child of each spare is claimed in the beginning
-                        constraints.push_back(std::make_shared<IsConstantValue>(getClaimVariableIndex(spare->id(), firstChild), 0));
-                        constraints.back()->setDescription("SPARE gate " + spare->name() + " claims first child");
-
-                        // If last child is claimed before failure, then the spare fails when the last child fails (constraint 5)
-                        std::shared_ptr<DFTConstraint> leftC = std::make_shared<IsLess>(getClaimVariableIndex(spare->id(), lastChild), childVarIndices.back());
-                        constraints.push_back(std::make_shared<Implies>(leftC, std::make_shared<IsEqual>(timePointVariables.at(i), childVarIndices.back())));
-                        constraints.back()->setDescription("Last child & claimed -> SPARE fails");
-
-                        // Construct constraint for trying to claim next child
-                        STORM_LOG_ASSERT(children.size() >= 2, "Spare has only one child");
-                        for (uint64_t currChild = 0; currChild < children.size() - 1; ++currChild) {
-                            uint64_t timeCurrChild = childVarIndices.at(currChild); // Moment when current child fails
-
-                            // If i-th child fails after being claimed, then try to claim next child (constraint 6)
-                            std::shared_ptr<DFTConstraint> tryClaimC = generateTryToClaimConstraint(spare, currChild + 1, timeCurrChild);
-                            constraints.push_back(std::make_shared<Iff>(std::make_shared<IsLess>(getClaimVariableIndex(spare->id(), children.at(currChild)->id()), timeCurrChild), tryClaimC));
-                            constraints.back()->setDescription("Try to claim " + std::to_string(currChild+2) + "th child");
-                        }
+                        generateSpareConstraint(i, childVarIndices, element);
                         break;
-                    }
                     case storm::storage::DFTElementType::PDEP:
-                        // FDEPs are considered later in the Markovian constraints
+                        generatePdepConstraint(i, childVarIndices, element);
                         break;
                     default:
-                        STORM_LOG_THROW(false, storm::exceptions::NotSupportedException, "SMT encoding for type '" << element->type() << "' is not supported.");
+                        STORM_LOG_THROW(false, storm::exceptions::NotSupportedException,
+                                        "SMT encoding for type '" << element->type() << "' is not supported.");
                         break;
                 }
             }
 
-            // Only one spare can claim a child (constraint 8)
-            // and only not failed childs can be claimed (addition to constrain 8)
-            for (size_t i = 0; i < dft.nrElements(); ++i) {
-                std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i);
-                if (element->isSpareGate()) {
-                    auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element);
-                    for (auto const& child : spare->children()) {
-                        std::vector<std::shared_ptr<DFTConstraint>> additionalC;
-                        uint64_t timeClaiming = getClaimVariableIndex(spare->id(), child->id());
-                        std::shared_ptr<DFTConstraint> leftC = std::make_shared<IsLessConstant>(timeClaiming, notFailed);
-                        // Child must be operational at time of claiming
-                        additionalC.push_back(std::make_shared<IsLess>(timeClaiming, timePointVariables.at(child->id())));
-                        // No other spare claims this child
-                        for (auto const& parent : child->parents()) {
-                            if (parent->isSpareGate() && parent->id() != spare->id()) {
-                                // Different spare
-                                additionalC.push_back(std::make_shared<IsConstantValue>(getClaimVariableIndex(parent->id(), child->id()), notFailed));
-                            }
-                        }
-                        constraints.push_back(std::make_shared<Implies>(leftC, std::make_shared<And>(additionalC)));
-                        constraints.back()->setDescription("Child " + child->name() + " must be operational at time of claiming by spare " + spare->name() + " and can only be claimed by one spare.");
+            // TODO(review): Constraint (8) is disabled for testing only — re-enable addClaimingConstraints() before merging
+            //addClaimingConstraints();
+
+            // Handle dependencies
+            addMarkovianConstraints();
+        }
+
+        // Constraint Generator Functions
+
+        void DFTASFChecker::generateAndConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                  std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            // Constraint for AND gate (constraint 1)
+            constraints.push_back(std::make_shared<IsMaximum>(timePointVariables.at(i), childVarIndices));
+            constraints.back()->setDescription("AND gate " + element->name());
+        }
+
+        void DFTASFChecker::generateOrConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                 std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            // Constraint for OR gate (constraint 2)
+            constraints.push_back(std::make_shared<IsMinimum>(timePointVariables.at(i), childVarIndices));
+            constraints.back()->setDescription("OR gate " + element->name());
+        }
+
+        void DFTASFChecker::generateVotConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                  std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            auto vot = std::static_pointer_cast<storm::storage::DFTVot<double> const>(element);
+            // VOTs are implemented via OR over ANDs with all possible combinations
+            std::vector<uint64_t> tmpVars;
+            size_t k = 0;
+            // Generate all permutations of k out of n
+            size_t combination = smallestIntWithNBitsSet(static_cast<size_t>(vot->threshold()));
+            do {
+                // Construct selected children from combination
+                std::vector<uint64_t> combinationChildren;
+                for (size_t j = 0; j < vot->nrChildren(); ++j) {
+                    if (combination & (1 << j)) {
+                        combinationChildren.push_back(childVarIndices.at(j));
                     }
                 }
+                // Introduce temporary variable for this AND
+                varNames.push_back("v_" + vot->name() + "_" + std::to_string(k));
+                size_t index = varNames.size() - 1;
+                tmpVars.push_back(index);
+                tmpTimePointVariables.push_back(index);
+                // AND over the selected children
+                constraints.push_back(std::make_shared<IsMaximum>(index, combinationChildren));
+                constraints.back()->setDescription("VOT gate " + element->name() + ": AND no. " + std::to_string(k));
+                // Generate next permutation
+                combination = nextBitPermutation(combination);
+                ++k;
+            } while (combination < (1 << vot->nrChildren()) && combination != 0);
+
+            // Constraint is OR over all possible combinations
+            constraints.push_back(std::make_shared<IsMinimum>(timePointVariables.at(i), tmpVars));
+            constraints.back()->setDescription("VOT gate " + element->name() + ": OR");
+        }
+
+        void DFTASFChecker::generatePandConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                   std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            // Constraint for PAND gate (constraint 3)
+            std::shared_ptr<SmtConstraint> ifC = std::make_shared<Sorted>(childVarIndices);
+            std::shared_ptr<SmtConstraint> thenC = std::make_shared<IsEqual>(timePointVariables.at(i),
+                                                                             childVarIndices.back());
+            std::shared_ptr<SmtConstraint> elseC = std::make_shared<IsConstantValue>(timePointVariables.at(i),
+                                                                                     notFailed);
+            constraints.push_back(std::make_shared<IfThenElse>(ifC, thenC, elseC));
+            constraints.back()->setDescription("PAND gate " + element->name());
+        }
+
+        void DFTASFChecker::generatePorConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                  std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            // Constraint for POR gate
+            // First child fails before all others
+            std::vector<std::shared_ptr<SmtConstraint>> firstSmallestC;
+            uint64_t timeFirstChild = childVarIndices.front();
+            for (uint64_t i = 1; i < childVarIndices.size(); ++i) {
+                firstSmallestC.push_back(std::make_shared<IsLess>(timeFirstChild, childVarIndices.at(i)));
+            }
+            std::shared_ptr<SmtConstraint> ifC = std::make_shared<And>(firstSmallestC);
+            std::shared_ptr<SmtConstraint> thenC = std::make_shared<IsEqual>(timePointVariables.at(i),
+                                                                             childVarIndices.front());
+            std::shared_ptr<SmtConstraint> elseC = std::make_shared<IsConstantValue>(timePointVariables.at(i),
+                                                                                     notFailed);
+            constraints.push_back(std::make_shared<IfThenElse>(ifC, thenC, elseC));
+            constraints.back()->setDescription("POR gate " + element->name());
+        }
+
+        void DFTASFChecker::generateSeqConstraint(std::vector<uint64_t> childVarIndices,
+                                                  std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            // Constraint for SEQ gate (constraint 4)
+            // As the restriction is not a gate we have to enumerate its children here
+            auto seq = std::static_pointer_cast<storm::storage::DFTRestriction<double> const>(element);
+            for (auto const &child : seq->children()) {
+                childVarIndices.push_back(timePointVariables.at(child->id()));
             }
 
-            // Handle dependencies
-            addMarkovianConstraints();
+            constraints.push_back(std::make_shared<Sorted>(childVarIndices));
+            constraints.back()->setDescription("SEQ gate " + element->name());
+        }
 
-            // Toplevel element will not fail (part of constraint 13)
-            constraints.push_back(std::make_shared<IsConstantValue>(timePointVariables.at(dft.getTopLevelIndex()), notFailed));
-            constraints.back()->setDescription("Toplevel element should not fail");
+        void DFTASFChecker::generateSpareConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                    std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element);
+            auto const &children = spare->children();
+            uint64_t firstChild = children.front()->id();
+            uint64_t lastChild = children.back()->id();
+
+            // First child of each spare is claimed in the beginning
+            constraints.push_back(std::make_shared<IsConstantValue>(getClaimVariableIndex(spare->id(), firstChild), 0));
+            constraints.back()->setDescription("SPARE gate " + spare->name() + " claims first child");
+
+            // If last child is claimed before failure, then the spare fails when the last child fails (constraint 5)
+            std::shared_ptr<SmtConstraint> leftC = std::make_shared<IsLess>(
+                    getClaimVariableIndex(spare->id(), lastChild), childVarIndices.back());
+            constraints.push_back(std::make_shared<Implies>(leftC, std::make_shared<IsEqual>(timePointVariables.at(i),
+                                                                                             childVarIndices.back())));
+            constraints.back()->setDescription("Last child & claimed -> SPARE fails");
+
+            // Construct constraint for trying to claim next child
+            STORM_LOG_ASSERT(children.size() >= 2, "Spare has only one child");
+            for (uint64_t currChild = 0; currChild < children.size() - 1; ++currChild) {
+                uint64_t timeCurrChild = childVarIndices.at(currChild); // Moment when current child fails
+                // If i-th child fails after being claimed, then try to claim next child (constraint 6)
+                std::shared_ptr<SmtConstraint> tryClaimC = generateTryToClaimConstraint(spare, currChild + 1,
+                                                                                        timeCurrChild);
+                constraints.push_back(std::make_shared<Iff>(
+                        std::make_shared<IsLess>(getClaimVariableIndex(spare->id(), children.at(currChild)->id()),
+                                                 timeCurrChild), tryClaimC));
+                constraints.back()->setDescription("Try to claim " + std::to_string(currChild + 2) + "th child");
+            }
         }
 
-        std::shared_ptr<DFTConstraint> DFTASFChecker::generateTryToClaimConstraint(std::shared_ptr<storm::storage::DFTSpare<ValueType> const> spare, uint64_t childIndex, uint64_t timepoint) const {
+        std::shared_ptr<SmtConstraint>
+        DFTASFChecker::generateTryToClaimConstraint(std::shared_ptr<storm::storage::DFTSpare<ValueType> const> spare,
+                                                    uint64_t childIndex, uint64_t timepoint) const {
             auto child = spare->children().at(childIndex);
             uint64_t timeChild = timePointVariables.at(child->id()); // Moment when the child fails
             uint64_t claimChild = getClaimVariableIndex(spare->id(), child->id()); // Moment the spare claims the child
 
-            std::vector<std::shared_ptr<DFTConstraint>> noClaimingPossible;
+            std::vector<std::shared_ptr<SmtConstraint>> noClaimingPossible;
             // Child cannot be claimed.
             if (childIndex + 1 < spare->children().size()) {
                 // Consider next child for claiming (second case in constraint 7)
-                noClaimingPossible.push_back(generateTryToClaimConstraint(spare, childIndex+1, timepoint));
+                noClaimingPossible.push_back(generateTryToClaimConstraint(spare, childIndex + 1, timepoint));
             } else {
                 // Last child: spare fails at same point as this child (third case in constraint 7)
                 noClaimingPossible.push_back(std::make_shared<IsEqual>(timePointVariables.at(spare->id()), timepoint));
             }
-            std::shared_ptr<DFTConstraint> elseCaseC = std::make_shared<And>(noClaimingPossible);
+            std::shared_ptr<SmtConstraint> elseCaseC = std::make_shared<And>(noClaimingPossible);
 
-            // Check if next child is availble (first case in constraint 7)
-            std::vector<std::shared_ptr<DFTConstraint>> claimingPossibleC;
+            // Check if next child is available (first case in constraint 7)
+            std::vector<std::shared_ptr<SmtConstraint>> claimingPossibleC;
             // Next child is not yet failed
             claimingPossibleC.push_back(std::make_shared<IsLess>(timepoint, timeChild));
             // Child is not yet claimed by a different spare
-            for (auto const& otherSpare : child->parents()) {
+            for (auto const &otherSpare : child->parents()) {
                 if (otherSpare->id() == spare->id()) {
                     // not a different spare.
                     continue;
                 }
-                claimingPossibleC.push_back(std::make_shared<IsConstantValue>(getClaimVariableIndex(otherSpare->id(), child->id()), notFailed));
+                claimingPossibleC.push_back(std::make_shared<IsLess>(timepoint,
+                                                                     getClaimVariableIndex(otherSpare->id(),
+                                                                                           child->id())));
             }
 
             // Claim child if available
-            std::shared_ptr<DFTConstraint> firstCaseC = std::make_shared<IfThenElse>(std::make_shared<And>(claimingPossibleC), std::make_shared<IsEqual>(claimChild, timepoint), elseCaseC);
+            std::shared_ptr<SmtConstraint> firstCaseC = std::make_shared<IfThenElse>(
+                    std::make_shared<And>(claimingPossibleC), std::make_shared<IsEqual>(claimChild, timepoint),
+                    elseCaseC);
             return firstCaseC;
         }
 
+        void DFTASFChecker::addClaimingConstraints() {
+            // Only one spare can claim a child (constraint 8)
+            // and only not failed children can be claimed (addition to constraint 8)
+            for (size_t i = 0; i < dft.nrElements(); ++i) {
+                std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i);
+                if (element->isSpareGate()) {
+                    auto spare = std::static_pointer_cast<storm::storage::DFTSpare<double> const>(element);
+                    for (auto const &child : spare->children()) {
+                        std::vector<std::shared_ptr<SmtConstraint>> additionalC;
+                        uint64_t timeClaiming = getClaimVariableIndex(spare->id(), child->id());
+                        std::shared_ptr<SmtConstraint> leftC = std::make_shared<IsLessConstant>(timeClaiming,
+                                                                                                notFailed);
+                        // Child must be operational at time of claiming
+                        additionalC.push_back(
+                                std::make_shared<IsLess>(timeClaiming, timePointVariables.at(child->id())));
+                        // No other spare claims this child
+                        for (auto const &parent : child->parents()) {
+                            if (parent->isSpareGate() && parent->id() != spare->id()) {
+                                // Different spare
+                                additionalC.push_back(std::make_shared<IsConstantValue>(
+                                        getClaimVariableIndex(parent->id(), child->id()), notFailed));
+                            }
+                        }
+                        constraints.push_back(std::make_shared<Implies>(leftC, std::make_shared<And>(additionalC)));
+                        constraints.back()->setDescription(
+                                "Child " + child->name() + " must be operational at time of claiming by spare " +
+                                spare->name() + " and can only be claimed by one spare.");
+                    }
+                }
+            }
+        }
+
+        void DFTASFChecker::generatePdepConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                                   std::shared_ptr<storm::storage::DFTElement<ValueType> const> element) {
+            auto dependency = std::static_pointer_cast<storm::storage::DFTDependency<double> const>(element);
+            auto const &dependentEvents = dependency->dependentEvents();
+            auto const &trigger = dependency->triggerEvent();
+            std::vector<uint64_t> dependentIndices;
+            for (size_t j = 0; j < dependentEvents.size(); ++j) {
+                dependentIndices.push_back(dependentEvents[j]->id());
+            }
+
+            constraints.push_back(std::make_shared<IsMaximum>(dependencyVariables.at(i), dependentIndices));
+            constraints.back()->setDescription("Dependency " + element->name() + ": Last element");
+            constraints.push_back(
+                    std::make_shared<IsEqual>(timePointVariables.at(i), timePointVariables.at(trigger->id())));
+            constraints.back()->setDescription("Dependency " + element->name() + ": Trigger element");
+        }
+
         void DFTASFChecker::addMarkovianConstraints() {
             uint64_t nrMarkovian = dft.nrBasicElements();
+            std::set<size_t> depElements;
             // Vector containing (non-)Markovian constraints for each timepoint
-            std::vector<std::vector<std::shared_ptr<DFTConstraint>>> markovianC(nrMarkovian);
-            std::vector<std::vector<std::shared_ptr<DFTConstraint>>> nonMarkovianC(nrMarkovian);
-            std::vector<std::vector<std::shared_ptr<DFTConstraint>>> notColdC(nrMarkovian);
+            std::vector<std::vector<std::shared_ptr<SmtConstraint>>> markovianC(nrMarkovian);
+            std::vector<std::vector<std::shared_ptr<SmtConstraint>>> nonMarkovianC(nrMarkovian);
+            std::vector<std::vector<std::shared_ptr<SmtConstraint>>> notColdC(nrMarkovian);
 
             // All dependent events of a failed trigger have failed as well (constraint 9)
             for (size_t j = 0; j < dft.nrElements(); ++j) {
                 std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(j);
                 if (element->hasOutgoingDependencies()) {
                     for (uint64_t i = 0; i < nrMarkovian; ++i) {
-                        std::shared_ptr<DFTConstraint> triggerFailed = std::make_shared<IsLessEqualConstant>(timePointVariables.at(j), i);
-                        std::vector<std::shared_ptr<DFTConstraint>> depFailed;
-                        for (auto const& dependency : element->outgoingDependencies()) {
-                            for (auto const& depElement : dependency->dependentEvents()) {
-                                depFailed.push_back(std::make_shared<IsLessEqualConstant>(timePointVariables.at(depElement->id()), i));
+                        std::shared_ptr<SmtConstraint> triggerFailed = std::make_shared<IsLessEqualConstant>(
+                                timePointVariables.at(j), i);
+                        std::vector<std::shared_ptr<SmtConstraint>> depFailed;
+                        for (auto const &dependency : element->outgoingDependencies()) {
+                            for (auto const &depElement : dependency->dependentEvents()) {
+                                depFailed.push_back(
+                                        std::make_shared<IsLessEqualConstant>(timePointVariables.at(depElement->id()),
+                                                                              i));
                             }
                         }
-                        markovianC[i].push_back(std::make_shared<Implies>(triggerFailed, std::make_shared<And>(depFailed)));
+                        markovianC[i].push_back(
+                                std::make_shared<Implies>(triggerFailed, std::make_shared<And>(depFailed)));
                     }
                 }
             }
             for (uint64_t i = 0; i < nrMarkovian; ++i) {
-                constraints.push_back(std::make_shared<Iff>(std::make_shared<IsBoolValue>(markovianVariables.at(i), true), std::make_shared<And>(markovianC[i])));
-                constraints.back()->setDescription("Markovian (" + std::to_string(i) + ") iff all dependent events which trigger failed also failed.");
+                constraints.push_back(
+                        std::make_shared<Iff>(std::make_shared<IsBoolValue>(markovianVariables.at(i), true),
+                                              std::make_shared<And>(markovianC[i])));
+                constraints.back()->setDescription("Markovian (" + std::to_string(i) +
+                                                   ") iff all dependent events which trigger failed also failed.");
             }
 
-            // In non-Markovian steps the next failed element is a dependent BE (constraint 10)
+            // In non-Markovian steps the next failed element is a dependent BE (constraint 10) + additions to specification in paper
             for (size_t j = 0; j < dft.nrElements(); ++j) {
                 std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(j);
                 if (element->isBasicElement()) {
                     auto be = std::static_pointer_cast<storm::storage::DFTBE<double> const>(element);
 
                     if (be->hasIngoingDependencies()) {
-                        for (uint64_t i = 0; i < nrMarkovian -1; ++i) {
-                            std::shared_ptr<DFTConstraint> nextFailure = std::make_shared<IsConstantValue>(timePointVariables.at(j), i+1);
-                            std::vector<std::shared_ptr<DFTConstraint>> triggerFailed;
-                            for (auto const& dependency : be->ingoingDependencies()) {
-                                triggerFailed.push_back(std::make_shared<IsLessEqualConstant>(timePointVariables.at(dependency->triggerEvent()->id()), i));
+                        depElements.emplace(j);
+                        for (uint64_t i = 0; i < nrMarkovian - 1; ++i) {
+                            std::shared_ptr<SmtConstraint> nextFailure = std::make_shared<IsConstantValue>(
+                                    timePointVariables.at(j), i + 1);
+                            std::vector<std::shared_ptr<SmtConstraint>> triggerFailed;
+                            for (auto const &dependency : be->ingoingDependencies()) {
+                                triggerFailed.push_back(std::make_shared<IsLessEqualConstant>(
+                                        timePointVariables.at(dependency->triggerEvent()->id()), i));
                             }
-                            nonMarkovianC[i].push_back(std::make_shared<Implies>(nextFailure, std::make_shared<Or>(triggerFailed)));
+                            nonMarkovianC[i].push_back(
+                                    std::make_shared<Implies>(nextFailure, std::make_shared<Or>(triggerFailed)));
                         }
                     }
                 }
             }
             for (uint64_t i = 0; i < nrMarkovian; ++i) {
-                constraints.push_back(std::make_shared<Implies>(std::make_shared<IsBoolValue>(markovianVariables.at(i), false), std::make_shared<And>(nonMarkovianC[i])));
-                constraints.back()->setDescription("Non-Markovian (" + std::to_string(i) + ") -> next failure is dependent BE.");
+                std::vector<std::shared_ptr<SmtConstraint>> dependentConstr;
+                for (auto dependentEvent: depElements) {
+                    std::shared_ptr<SmtConstraint> nextFailure = std::make_shared<IsConstantValue>(
+                            timePointVariables.at(dependentEvent), i + 1);
+                    dependentConstr.push_back(nextFailure);
+                }
+                // Add Constraint that any DEPENDENT event has to fail next
+                nonMarkovianC[i].push_back(std::make_shared<Or>(dependentConstr));
+                constraints.push_back(
+                        std::make_shared<Implies>(std::make_shared<IsBoolValue>(markovianVariables.at(i), false),
+                                                  std::make_shared<And>(nonMarkovianC[i])));
+                constraints.back()->setDescription(
+                        "Non-Markovian (" + std::to_string(i) + ") -> next failure is dependent BE.");
             }
 
             // In Markovian steps the failure rate is positive (constraint 11)
@@ -721,7 +431,8 @@ namespace storm {
                 if (element->isBasicElement()) {
                     auto be = std::static_pointer_cast<storm::storage::DFTBE<double> const>(element);
                     for (uint64_t i = 0; i < nrMarkovian; ++i) {
-                        std::shared_ptr<DFTConstraint> nextFailure = std::make_shared<IsConstantValue>(timePointVariables.at(j), i+1);
+                        std::shared_ptr<SmtConstraint> nextFailure = std::make_shared<IsConstantValue>(
+                                timePointVariables.at(j), i + 1);
                         // BE is not cold
                         // TODO: implement use of activation variables here
                         notColdC[i].push_back(std::make_shared<Implies>(nextFailure, std::make_shared<IsTrue>(be->canFail())));
@@ -729,35 +440,41 @@ namespace storm {
                 }
             }
             for (uint64_t i = 0; i < nrMarkovian; ++i) {
-                constraints.push_back(std::make_shared<Implies>(std::make_shared<IsBoolValue>(markovianVariables.at(i), true), std::make_shared<And>(notColdC[i])));
+                constraints.push_back(
+                        std::make_shared<Implies>(std::make_shared<IsBoolValue>(markovianVariables.at(i), true),
+                                                  std::make_shared<And>(notColdC[i])));
                 constraints.back()->setDescription("Markovian (" + std::to_string(i) + ") -> positive failure rate.");
             }
 
         }
 
 
-        void DFTASFChecker::toFile(std::string const& filename) {
+        void DFTASFChecker::toFile(std::string const &filename) {
             std::ofstream stream;
             storm::utility::openFile(filename, stream);
             stream << "; time point variables" << std::endl;
-            for (auto const& timeVarEntry : timePointVariables) {
+            for (auto const &timeVarEntry : timePointVariables) {
                 stream << "(declare-fun " << varNames[timeVarEntry.second] << "()  Int)" << std::endl;
             }
             stream << "; claim variables" << std::endl;
-            for (auto const& claimVarEntry : claimVariables) {
+            for (auto const &claimVarEntry : claimVariables) {
                 stream << "(declare-fun " << varNames[claimVarEntry.second] << "() Int)" << std::endl;
             }
             stream << "; Markovian variables" << std::endl;
-            for (auto const& markovianVarEntry : markovianVariables) {
+            for (auto const &markovianVarEntry : markovianVariables) {
                 stream << "(declare-fun " << varNames[markovianVarEntry.second] << "() Bool)" << std::endl;
             }
+            stream << "; Dependency variables" << std::endl;
+            for (auto const &depVarEntry : dependencyVariables) {
+                stream << "(declare-fun " << varNames[depVarEntry.second] << "() Int)" << std::endl;
+            }
             if (!tmpTimePointVariables.empty()) {
                 stream << "; Temporary variables" << std::endl;
-                for (auto const& tmpVar : tmpTimePointVariables) {
+                for (auto const &tmpVar : tmpTimePointVariables) {
                     stream << "(declare-fun " << varNames[tmpVar] << "() Int)" << std::endl;
                 }
             }
-            for (auto const& constraint : constraints) {
+            for (auto const &constraint : constraints) {
                 if (!constraint->description().empty()) {
                     stream << "; " << constraint->description() << std::endl;
                 }
@@ -766,5 +483,262 @@ namespace storm {
             stream << "(check-sat)" << std::endl;
             storm::utility::closeFile(stream);
         }
+
+        void DFTASFChecker::toSolver() {
+            // First convert the DFT
+            convert();
+
+            std::shared_ptr<storm::expressions::ExpressionManager> manager(new storm::expressions::ExpressionManager());
+            solver = storm::utility::solver::SmtSolverFactory().create(
+                    *manager);
+            //Add variables to manager
+            for (auto const &timeVarEntry : timePointVariables) {
+                manager->declareIntegerVariable(varNames[timeVarEntry.second]);
+            }
+            for (auto const &claimVarEntry : claimVariables) {
+                manager->declareIntegerVariable(varNames[claimVarEntry.second]);
+            }
+            for (auto const &markovianVarEntry : markovianVariables) {
+                manager->declareBooleanVariable(varNames[markovianVarEntry.second]);
+            }
+            if (!tmpTimePointVariables.empty()) {
+                for (auto const &tmpVar : tmpTimePointVariables) {
+                    manager->declareIntegerVariable(varNames[tmpVar]);
+                }
+            }
+            for (auto const &depVarEntry : dependencyVariables) {
+                manager->declareIntegerVariable(varNames[depVarEntry.second]);
+            }
+            // Add constraints to solver
+            for (auto const &constraint : constraints) {
+                solver->add(constraint->toExpression(varNames, manager));
+            }
+
+        }
+
+        storm::solver::SmtSolver::CheckResult DFTASFChecker::checkTleFailsWithEq(uint64_t bound) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+
+            // Set backtracking marker to check several properties without reconstructing DFT encoding
+            solver->push();
+            // Constraint that the toplevel element fails at exactly time point 'bound'
+            std::shared_ptr<SmtConstraint> tleFailedConstr = std::make_shared<IsConstantValue>(
+                    timePointVariables.at(dft.getTopLevelIndex()), bound);
+            std::shared_ptr<storm::expressions::ExpressionManager> manager = solver->getManager().getSharedPointer();
+            solver->add(tleFailedConstr->toExpression(varNames, manager));
+            storm::solver::SmtSolver::CheckResult res = solver->check();
+            solver->pop();
+            return res;
+        }
+
+        storm::solver::SmtSolver::CheckResult DFTASFChecker::checkTleFailsWithLeq(uint64_t bound) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+
+            // Set backtracking marker to check several properties without reconstructing DFT encoding
+            solver->push();
+            // Constraint that toplevel element can fail with less or equal 'bound' failures
+            std::shared_ptr<SmtConstraint> tleNeverFailedConstr = std::make_shared<IsLessEqualConstant>(
+                    timePointVariables.at(dft.getTopLevelIndex()), bound);
+            std::shared_ptr<storm::expressions::ExpressionManager> manager = solver->getManager().getSharedPointer();
+            solver->add(tleNeverFailedConstr->toExpression(varNames, manager));
+            storm::solver::SmtSolver::CheckResult res = solver->check();
+            solver->pop();
+            return res;
+        }
+
+        void DFTASFChecker::setSolverTimeout(uint_fast64_t milliseconds) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, timeout cannot be set");
+            solver->setTimeout(milliseconds);
+        }
+
+        void DFTASFChecker::unsetSolverTimeout() {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, timeout cannot be unset");
+            solver->unsetTimeout();
+        }
+
+        storm::solver::SmtSolver::CheckResult DFTASFChecker::checkTleNeverFailed() {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            return checkTleFailsWithEq(notFailed);
+        }
+
+        storm::solver::SmtSolver::CheckResult
+        DFTASFChecker::checkFailsLeqWithEqNonMarkovianState(uint64_t checkbound, uint64_t nrNonMarkovian) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            std::vector<uint64_t> markovianIndices;
+            // Get Markovian variable indices up until given timepoint
+            for (uint64_t i = 0; i < checkbound; ++i) {
+                markovianIndices.push_back(markovianVariables.at(i));
+            }
+            // Set backtracking marker to check several properties without reconstructing DFT encoding
+            solver->push();
+            // Constraint that TLE fails before or during given timepoint
+            std::shared_ptr<SmtConstraint> tleFailedConstr = std::make_shared<IsLessEqualConstant>(
+                    timePointVariables.at(dft.getTopLevelIndex()), checkbound);
+            std::shared_ptr<storm::expressions::ExpressionManager> manager = solver->getManager().getSharedPointer();
+            solver->add(tleFailedConstr->toExpression(varNames, manager));
+
+            // Constraint that a given number of non-Markovian states are visited
+            std::shared_ptr<SmtConstraint> nonMarkovianConstr = std::make_shared<FalseCountIsEqualConstant>(
+                    markovianIndices, nrNonMarkovian);
+            solver->add(nonMarkovianConstr->toExpression(varNames, manager));
+            storm::solver::SmtSolver::CheckResult res = solver->check();
+            solver->pop();
+            return res;
+        }
+
+        storm::solver::SmtSolver::CheckResult
+        DFTASFChecker::checkFailsAtTimepointWithOnlyMarkovianState(uint64_t timepoint) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            std::vector<uint64_t> markovianIndices;
+            // Get Markovian variable indices
+            for (uint64_t i = 0; i < timepoint; ++i) {
+                markovianIndices.push_back(markovianVariables.at(i));
+            }
+            // Set backtracking marker to check several properties without reconstructing DFT encoding
+            solver->push();
+            // Constraint that all states visited up to the given timepoint are Markovian
+            std::shared_ptr<SmtConstraint> countConstr = std::make_shared<TrueCountIsConstantValue>(
+                    markovianIndices, timepoint);
+            // Constraint that TLE fails at timepoint
+            std::shared_ptr<SmtConstraint> timepointConstr = std::make_shared<IsConstantValue>(
+                    timePointVariables.at(dft.getTopLevelIndex()), timepoint);
+            std::shared_ptr<storm::expressions::ExpressionManager> manager = solver->getManager().getSharedPointer();
+            solver->add(countConstr->toExpression(varNames, manager));
+            solver->add(timepointConstr->toExpression(varNames, manager));
+            storm::solver::SmtSolver::CheckResult res = solver->check();
+            solver->pop();
+            return res;
+        }
+
+        uint64_t DFTASFChecker::correctLowerBound(uint64_t bound, uint_fast64_t timeout) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            STORM_LOG_DEBUG("Lower bound correction - try to correct bound " << std::to_string(bound));
+            uint64_t boundCandidate = bound;
+            uint64_t nrDepEvents = 0;
+            uint64_t nrNonMarkovian = 0;
+            // Count dependent events
+            for (size_t i = 0; i < dft.nrElements(); ++i) {
+                std::shared_ptr<storm::storage::DFTElement<ValueType> const> element = dft.getElement(i);
+                if (element->isBasicElement()) {
+                    auto be = std::static_pointer_cast<storm::storage::DFTBE<double> const>(element);
+                    if (be->hasIngoingDependencies()) {
+                        ++nrDepEvents;
+                    }
+                }
+            }
+            // Only need to check as long as bound candidate + nr of non-Markovians to check is smaller than number of dependent events
+            while (nrNonMarkovian <= nrDepEvents && boundCandidate > 0) {
+                STORM_LOG_TRACE(
+                        "Lower bound correction - check possible bound " << std::to_string(boundCandidate) << " with "
+                                                                         << std::to_string(nrNonMarkovian)
+                                                                         << " non-Markovian states");
+                setSolverTimeout(timeout * 1000);
+                storm::solver::SmtSolver::CheckResult tmp_res =
+                        checkFailsLeqWithEqNonMarkovianState(boundCandidate + nrNonMarkovian, nrNonMarkovian);
+                unsetSolverTimeout();
+                switch (tmp_res) {
+                    case storm::solver::SmtSolver::CheckResult::Sat:
+                        /* If SAT, there is a sequence where only boundCandidate-many BEs fail directly and rest is nonMarkovian.
+                         * Bound candidate is valid, therefore check the next one */
+                        STORM_LOG_TRACE("Lower bound correction - SAT");
+                        --boundCandidate;
+                        break;
+                    case storm::solver::SmtSolver::CheckResult::Unknown:
+                        // If any query returns unknown, we cannot be sure about the bound and fall back to the naive one
+                        STORM_LOG_DEBUG("Lower bound correction - Solver returned 'Unknown', corrected to 1");
+                        return 1;
+                    default:
+                        // if query is UNSAT, increase number of non-Markovian states and try again
+                        STORM_LOG_TRACE("Lower bound correction - UNSAT");
+                        ++nrNonMarkovian;
+                        break;
+                }
+            }
+            // if for one candidate all queries are UNSAT, it is not valid. Return last valid candidate
+            STORM_LOG_DEBUG("Lower bound correction - corrected bound to " << std::to_string(boundCandidate + 1));
+            return boundCandidate + 1;
+        }
+
+        uint64_t DFTASFChecker::correctUpperBound(uint64_t bound, uint_fast64_t timeout) {
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            STORM_LOG_DEBUG("Upper bound correction - try to correct bound " << std::to_string(bound));
+
+            while (bound > 1) {
+                setSolverTimeout(timeout * 1000);
+                storm::solver::SmtSolver::CheckResult tmp_res =
+                        checkFailsAtTimepointWithOnlyMarkovianState(bound);
+                unsetSolverTimeout();
+                switch (tmp_res) {
+                    case storm::solver::SmtSolver::CheckResult::Sat:
+                        STORM_LOG_DEBUG("Upper bound correction - corrected bound to " << std::to_string(bound));
+                        return bound;
+                    case storm::solver::SmtSolver::CheckResult::Unknown:
+                        STORM_LOG_DEBUG("Upper bound correction - Solver returned 'Unknown', corrected to "); // NOTE(review): log message is truncated — the intended bound value is missing from the output
+                        return bound;
+                    default:
+                        --bound;
+                        break;
+
+                }
+            }
+            STORM_LOG_DEBUG("Upper bound correction - corrected bound to " << std::to_string(bound));
+            return bound;
+        }
+
+        uint64_t DFTASFChecker::getLeastFailureBound(uint_fast64_t timeout) {
+            STORM_LOG_TRACE("Compute lower bound for number of BE failures necessary for the DFT to fail");
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            uint64_t bound = 0;
+            while (bound < notFailed) {
+                setSolverTimeout(timeout * 1000);
+                storm::solver::SmtSolver::CheckResult tmp_res = checkTleFailsWithLeq(bound);
+                unsetSolverTimeout();
+                switch (tmp_res) {
+                    case storm::solver::SmtSolver::CheckResult::Sat:
+                        if (!dft.getDependencies().empty()) {
+                            return correctLowerBound(bound, timeout);
+                        } else {
+                            return bound;
+                        }
+                    case storm::solver::SmtSolver::CheckResult::Unknown:
+                        STORM_LOG_DEBUG("Lower bound: Solver returned 'Unknown'");
+                        return bound;
+                    default:
+                        ++bound;
+                        break;
+                }
+
+            }
+            return bound;
+        }
+
+        uint64_t DFTASFChecker::getAlwaysFailedBound(uint_fast64_t timeout) {
+            STORM_LOG_TRACE("Compute bound for number of BE failures such that the DFT always fails");
+            STORM_LOG_ASSERT(solver, "SMT Solver was not initialized, call toSolver() before checking queries");
+            if (checkTleNeverFailed() == storm::solver::SmtSolver::CheckResult::Sat) {
+                return notFailed;
+            }
+            uint64_t bound = notFailed - 1;
+            while (bound >= 0) { // NOTE(review): always true for uint64_t — loop exits only via return, and --bound wraps around at 0
+                setSolverTimeout(timeout * 1000);
+                storm::solver::SmtSolver::CheckResult tmp_res = checkTleFailsWithEq(bound);
+                unsetSolverTimeout();
+                switch (tmp_res) {
+                    case storm::solver::SmtSolver::CheckResult::Sat:
+                        if (!dft.getDependencies().empty()) {
+                            return correctUpperBound(bound, timeout);
+                        } else {
+                            return bound;
+                        }
+                    case storm::solver::SmtSolver::CheckResult::Unknown:
+                        STORM_LOG_DEBUG("Upper bound: Solver returned 'Unknown'");
+                        return bound;
+                    default:
+                        --bound;
+                        break;
+                }
+            }
+            return bound;
+        }
     }
 }
diff --git a/src/storm-dft/modelchecker/dft/DFTASFChecker.h b/src/storm-dft/modelchecker/dft/DFTASFChecker.h
index ee53b152d..84310b775 100644
--- a/src/storm-dft/modelchecker/dft/DFTASFChecker.h
+++ b/src/storm-dft/modelchecker/dft/DFTASFChecker.h
@@ -4,53 +4,153 @@
 #include <vector>
 #include <unordered_map>
 
+#include "storm/solver/SmtSolver.h"
+#include "SmtConstraint.h"
 #include "storm-dft/storage/dft/DFT.h"
-
+#include "storm/utility/solver.h"
 
 namespace storm {
     namespace modelchecker {
-        class DFTConstraint {
+        class SpareAndChildPair {
         public:
-            virtual ~DFTConstraint() {
+            SpareAndChildPair(uint64_t spareIndex, uint64_t childIndex) : spareIndex(spareIndex), childIndex(childIndex) {
             }
             
-            virtual std::string toSmtlib2(std::vector<std::string> const& varNames) const = 0;
-
-            virtual std::string description() const {
-                return descript;
+            friend bool operator<(SpareAndChildPair const& p1, SpareAndChildPair const& p2) {
+                return p1.spareIndex < p2.spareIndex || (p1.spareIndex == p2.spareIndex && p1.childIndex < p2.childIndex);
             }
 
-            void setDescription(std::string const& descr) {
-                descript = descr;
-            }
-            
         private:
-            std::string descript;
+            uint64_t spareIndex;
+            uint64_t childIndex;
         };
-        
-        class SpareAndChildPair {
+
+        class DependencyPair {
         public:
-            SpareAndChildPair(uint64_t spareIndex, uint64_t childIndex) : spareIndex(spareIndex), childIndex(childIndex) {
+            DependencyPair(uint64_t depIndex, uint64_t childIndex) : depIndex(depIndex), childIndex(childIndex) {
             }
-            
-            friend bool operator<(SpareAndChildPair const& p1, SpareAndChildPair const& p2) {
-                return p1.spareIndex < p2.spareIndex || (p1.spareIndex == p2.spareIndex && p1.childIndex < p2.childIndex);
+
+            friend bool operator<(DependencyPair const &p1, DependencyPair const &p2) {
+                return p1.depIndex < p2.depIndex || (p1.depIndex == p2.depIndex && p1.childIndex < p2.childIndex);
             }
 
         private:
-            uint64_t spareIndex;
+            uint64_t depIndex;
             uint64_t childIndex;
         };
-        
-        
+
+
         class DFTASFChecker {
             using ValueType = double;
         public:
             DFTASFChecker(storm::storage::DFT<ValueType> const&);
+            /**
+             * Generate general variables and constraints for the DFT and store them in the corresponding maps and vectors
+             *
+             */
             void convert();
             void toFile(std::string const&);
+
+            /**
+             * Generates a new solver instance and prepares it for SMT checking of the DFT. Needs to be called before all queries to the solver
+             */
+            void toSolver();
+
+            /**
+             * Check if the TLE of the DFT never fails
+             *
+             * @return  "Sat" if TLE never fails, "Unsat" if it does, otherwise "Unknown"
+             */
+            storm::solver::SmtSolver::CheckResult checkTleNeverFailed();
+
+            /**
+             * Check if there exists a sequence of BE failures of exactly given length such that the TLE of the DFT fails
+             *
+             * @param bound the length of the sequence
+             * @return "Sat" if such a sequence exists, "Unsat" if it does not, otherwise "Unknown"
+             */
+            storm::solver::SmtSolver::CheckResult checkTleFailsWithEq(uint64_t bound);
+
+            /**
+             * Check if there exists a sequence of BE failures of at least given length such that the TLE of the DFT fails
+             *
+             * @param bound the length of the sequence
+             * @return "Sat" if such a sequence exists, "Unsat" if it does not, otherwise "Unknown"
+             */
+            storm::solver::SmtSolver::CheckResult checkTleFailsWithLeq(uint64_t bound);
+
+            /**
+             * Get the minimal number of BEs necessary for the TLE to fail (lower bound for number of failures to check)
+             *
+             * @param timeout timeout for each query in seconds, defaults to 10 seconds
+             * @return the minimal number
+             */
+            uint64_t getLeastFailureBound(uint_fast64_t timeout = 10);
+
+            /**
+             * Get the number of BE failures for which the TLE always fails (upper bound for number of failures to check).
+             * Note that the returned value may be higher than the real one when dependencies are present.
+             *
+             * @param timeout timeout for each query in seconds, defaults to 10 seconds
+             * @return the number
+             */
+            uint64_t getAlwaysFailedBound(uint_fast64_t timeout = 10);
+
+            /**
+             * Set the timeout of the solver
+             *
+             * @param milliseconds the timeout in milliseconds
+             */
+            void setSolverTimeout(uint_fast64_t milliseconds);
+
+            /**
+             * Unset the timeout for the solver
+             */
+            void unsetSolverTimeout();
             
         private:
+            /**
+             * Helper function to check if the TLE fails before or at a given timepoint while visiting exactly
+             * a given number of non-Markovian states
+             *
+             * @param checkbound timepoint to check against
+             * @param nrNonMarkovian the number of non-Markovian states to check against
+             * @return "Sat" if a sequence of BE failures exists such that the constraints are satisfied,
+             * "Unsat" if it does not, otherwise "Unknown"
+             */
+            storm::solver::SmtSolver::CheckResult
+            checkFailsLeqWithEqNonMarkovianState(uint64_t checkbound, uint64_t nrNonMarkovian);
+
+            /**
+             * Helper function that checks if the DFT can fail at the given timepoint such that every state visited up to it is Markovian
+             *
+             * @param timepoint point in time to check
+             * @return "Sat" if a sequence of BE failures exists such that the TLE fails at the timepoint and only Markovian states are visited,
+             * "Unsat" if it does not, otherwise "Unknown"
+             */
+            storm::solver::SmtSolver::CheckResult checkFailsAtTimepointWithOnlyMarkovianState(uint64_t timepoint);
+
+            /**
+             * Helper function for correction of least failure bound when dependencies are present.
+             * The main idea is to check if a later point of failure for the TLE than the pre-computed bound exists, but
+             * up until that point the number of non-Markovian states visited is so large, that less than the pre-computed bound BEs fail by themselves.
+             * The corrected bound is then (newTLEFailureTimepoint)-(nrNonMarkovianStatesVisited). This term is minimized.
+             *
+             * @param bound known lower bound to be corrected
+             * @param timeout timeout for each query in seconds
+             * @return the corrected bound
+             */
+            uint64_t correctLowerBound(uint64_t bound, uint_fast64_t timeout);
+
+            /**
+             * Helper function for correction of bound for number of BEs such that the DFT always fails when dependencies are present
+             *
+             * @param bound known bound to be corrected
+             * @param timeout timeout for each query in seconds
+             * @return the corrected bound
+             */
+            uint64_t correctUpperBound(uint64_t bound, uint_fast64_t timeout);
+
             uint64_t getClaimVariableIndex(uint64_t spareIndex, uint64_t childIndex) const;
 
             /**
@@ -63,7 +163,69 @@ namespace storm {
              *
              * @return Constraint encoding the claiming.
              */
-            std::shared_ptr<DFTConstraint> generateTryToClaimConstraint(std::shared_ptr<storm::storage::DFTSpare<ValueType> const> spare, uint64_t childIndex, uint64_t timepoint) const;
+            std::shared_ptr<SmtConstraint>
+            generateTryToClaimConstraint(std::shared_ptr<storm::storage::DFTSpare<ValueType> const> spare,
+                                         uint64_t childIndex, uint64_t timepoint) const;
+
+            /**
+             * Add constraints encoding AND gates.
+             * This corresponds to constraint (1)
+             */
+            void generateAndConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                       std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+             * Add constraints encoding OR gates.
+             * This corresponds to constraint (2)
+             */
+            void generateOrConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                      std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+             * Add constraints encoding VOT gates.
+             */
+            void generateVotConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                       std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+             * Add constraints encoding PAND gates.
+             * This corresponds to constraint (3)
+             */
+            void generatePandConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                        std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+             * Add constraints encoding POR gates.
+             */
+            void generatePorConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                       std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+             * Add constraints encoding SEQ gates.
+             * This corresponds to constraint (4)
+             */
+            void generateSeqConstraint(std::vector<uint64_t> childVarIndices,
+                                       std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+            * Add constraints encoding SPARE gates.
+            * This corresponds to constraints (5),(6),(7)
+            */
+            void generateSpareConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                         std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+             * Add constraints encoding PDEP gates.
+             *
+             */
+            void generatePdepConstraint(size_t i, std::vector<uint64_t> childVarIndices,
+                                        std::shared_ptr<storm::storage::DFTElement<ValueType> const> element);
+
+            /**
+            * Add constraints encoding claiming rules.
+            * This corresponds to constraint (8) and additional constraints
+            */
+            void addClaimingConstraints();
 
             /**
              * Add constraints encoding Markovian states.
@@ -72,10 +234,12 @@ namespace storm {
             void addMarkovianConstraints();
             
             storm::storage::DFT<ValueType> const& dft;
+            std::shared_ptr<storm::solver::SmtSolver> solver = nullptr;
             std::vector<std::string> varNames;
             std::unordered_map<uint64_t, uint64_t> timePointVariables;
-            std::vector<std::shared_ptr<DFTConstraint>> constraints;
+            std::vector<std::shared_ptr<SmtConstraint>> constraints;
             std::map<SpareAndChildPair, uint64_t> claimVariables;
+            std::unordered_map<uint64_t, uint64_t> dependencyVariables;
             std::unordered_map<uint64_t, uint64_t> markovianVariables;
             std::vector<uint64_t> tmpTimePointVariables;
             uint64_t notFailed;
diff --git a/src/storm-dft/modelchecker/dft/SmtConstraint.cpp b/src/storm-dft/modelchecker/dft/SmtConstraint.cpp
new file mode 100644
index 000000000..78e963d05
--- /dev/null
+++ b/src/storm-dft/modelchecker/dft/SmtConstraint.cpp
@@ -0,0 +1,701 @@
+#include "DFTASFChecker.h"
+#include <storm/storage/expressions/ExpressionManager.h>
+#include <string>
+
+namespace storm {
+
+    namespace modelchecker {
+
+        /*
+         * Variable[VarIndex] is the maximum of the others
+         */
+        class IsMaximum : public SmtConstraint {
+        public:
+            IsMaximum(uint64_t varIndex, std::vector<uint64_t> const &varIndices) : varIndex(varIndex),
+                                                                                    varIndices(varIndices) {
+            }
+
+            virtual ~IsMaximum() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(and ";
+                // assert it is greater than or equal to all values.
+                for (auto const &ovi : varIndices) {
+                    sstr << "(>= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
+                }
+                // assert it is one of the values.
+                sstr << "(or ";
+                for (auto const &ovi : varIndices) {
+                    sstr << "(= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
+                }
+                sstr << ")"; // end of the or
+                sstr << ")"; // end outer and.
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> outerAnd;
+                std::vector<storm::expressions::Expression> innerOr;
+                for (auto const &ovi : varIndices) {
+                    outerAnd.push_back((manager->getVariableExpression(varNames.at(varIndex)) >=
+                                        manager->getVariableExpression(varNames.at(ovi))));
+                    innerOr.push_back((manager->getVariableExpression(varNames.at(varIndex)) ==
+                                       manager->getVariableExpression(varNames.at(ovi))));
+                }
+                outerAnd.push_back(disjunction(innerOr));
+                return conjunction(outerAnd);
+            }
+
+        private:
+            uint64_t varIndex;
+            std::vector<uint64_t> varIndices;
+        };
+
+
+        /*
+         * First is the minimum of the others
+         */
+        class IsMinimum : public SmtConstraint {
+        public:
+            IsMinimum(uint64_t varIndex, std::vector<uint64_t> const &varIndices) : varIndex(varIndex),
+                                                                                    varIndices(varIndices) {
+            }
+
+            virtual ~IsMinimum() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(and ";
+                // assert it is less than or equal to all values.
+                for (auto const &ovi : varIndices) {
+                    sstr << "(<= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
+                }
+                // assert it is one of the values.
+                sstr << "(or ";
+                for (auto const &ovi : varIndices) {
+                    sstr << "(= " << varNames.at(varIndex) << " " << varNames.at(ovi) << ") ";
+                }
+                sstr << ")"; // end of the or
+                sstr << ")"; // end outer and.
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> outerAnd;
+                std::vector<storm::expressions::Expression> innerOr;
+                for (auto const &ovi : varIndices) {
+                    outerAnd.push_back((manager->getVariableExpression(varNames.at(varIndex)) <=
+                                        manager->getVariableExpression(varNames.at(ovi))));
+                    innerOr.push_back((manager->getVariableExpression(varNames.at(varIndex)) ==
+                                       manager->getVariableExpression(varNames.at(ovi))));
+                }
+                outerAnd.push_back(disjunction(innerOr));
+                return conjunction(outerAnd);
+            }
+
+        private:
+            uint64_t varIndex;
+            std::vector<uint64_t> varIndices;
+        };
+
+
+        class BetweenValues : public SmtConstraint {
+        public:
+            BetweenValues(uint64_t varIndex, uint64_t lower, uint64_t upper) : varIndex(varIndex), upperBound(upper),
+                                                                               lowerBound(lower) {
+            }
+
+            virtual ~BetweenValues() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(and ";
+                sstr << "(>= " << varNames.at(varIndex) << " " << lowerBound << ")";
+                sstr << "(<= " << varNames.at(varIndex) << " " << upperBound << ")";
+                sstr << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return (manager->getVariableExpression(varNames.at(varIndex)) >= lowerBound) &&
+                       (manager->getVariableExpression(varNames.at(varIndex)) <= upperBound);
+            }
+
+        private:
+            uint64_t varIndex;
+            uint64_t upperBound;
+            uint64_t lowerBound;
+        };
+
+
+        class And : public SmtConstraint {
+        public:
+            And(std::vector<std::shared_ptr<SmtConstraint>> const &constraints) : constraints(constraints) {
+            }
+
+            virtual ~And() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                if (constraints.empty()) {
+                    sstr << "true";
+                } else {
+                    sstr << "(and";
+                    for (auto const &c : constraints) {
+                        sstr << " " << c->toSmtlib2(varNames);
+                    }
+                    sstr << ")";
+                }
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                if (constraints.empty()) {
+                    return manager->boolean(true);
+                } else {
+                    std::vector<storm::expressions::Expression> conjuncts;
+                    for (auto const &c : constraints) {
+                        conjuncts.push_back(c->toExpression(varNames, manager));
+                    }
+                    return conjunction(conjuncts);
+                }
+            }
+
+        private:
+            std::vector<std::shared_ptr<SmtConstraint>> constraints;
+
+        };
+
+
+        class Or : public SmtConstraint {
+        public:
+            Or(std::vector<std::shared_ptr<SmtConstraint>> const &constraints) : constraints(constraints) {
+            }
+
+            virtual ~Or() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                if (constraints.empty()) {
+                    sstr << "false";
+                } else {
+                    sstr << "(or";
+                    for (auto const &c : constraints) {
+                        sstr << " " << c->toSmtlib2(varNames);
+                    }
+                    sstr << ")";
+                }
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                if (constraints.empty()) {
+                    return manager->boolean(false);
+                } else {
+                    std::vector<storm::expressions::Expression> disjuncts;
+                    for (auto const &c : constraints) {
+                        disjuncts.push_back(c->toExpression(varNames, manager));
+                    }
+                    return disjunction(disjuncts);
+                }
+            }
+
+        private:
+            std::vector<std::shared_ptr<SmtConstraint>> constraints;
+
+        };
+
+
+        class Implies : public SmtConstraint {
+        public:
+            Implies(std::shared_ptr<SmtConstraint> l, std::shared_ptr<SmtConstraint> r) : lhs(l), rhs(r) {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(=> " << lhs->toSmtlib2(varNames) << " " << rhs->toSmtlib2(varNames) << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return implies(lhs->toExpression(varNames, manager), rhs->toExpression(varNames, manager));
+            }
+
+        private:
+            std::shared_ptr<SmtConstraint> lhs;
+            std::shared_ptr<SmtConstraint> rhs;
+        };
+
+
+        class Iff : public SmtConstraint {
+        public:
+            Iff(std::shared_ptr<SmtConstraint> l, std::shared_ptr<SmtConstraint> r) : lhs(l), rhs(r) {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(= " << lhs->toSmtlib2(varNames) << " " << rhs->toSmtlib2(varNames) << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return iff(lhs->toExpression(varNames, manager), rhs->toExpression(varNames, manager));
+            }
+
+        private:
+            std::shared_ptr<SmtConstraint> lhs;
+            std::shared_ptr<SmtConstraint> rhs;
+        };
+
+
+        class IsTrue : public SmtConstraint {
+        public:
+            IsTrue(bool val) : value(val) {
+            }
+
+            virtual ~IsTrue() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << (value ? "true" : "false");
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->boolean(value);
+            }
+
+        private:
+            bool value;
+        };
+
+
+        class IsBoolValue : public SmtConstraint {
+        public:
+            IsBoolValue(uint64_t varIndex, bool val) : varIndex(varIndex), value(val) {
+            }
+
+            virtual ~IsBoolValue() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                assert(varIndex < varNames.size());
+                if (value) {
+                    sstr << varNames.at(varIndex);
+                } else {
+                    sstr << "(not " << varNames.at(varIndex) << ")";
+                }
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                if (value) {
+                    return manager->getVariableExpression(varNames.at(varIndex));
+                } else {
+                    return !(manager->getVariableExpression(varNames.at(varIndex)));
+                }
+            }
+
+        private:
+            uint64_t varIndex;
+            bool value;
+        };
+
+
+        class IsConstantValue : public SmtConstraint {
+        public:
+            IsConstantValue(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) {
+            }
+
+            virtual ~IsConstantValue() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                assert(varIndex < varNames.size());
+                sstr << "(= " << varNames.at(varIndex) << " " << value << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(varIndex)) == manager->integer(value);
+            }
+
+        private:
+            uint64_t varIndex;
+            uint64_t value;
+        };
+
+
+        class IsLessConstant : public SmtConstraint {
+        public:
+            IsLessConstant(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) {
+            }
+
+            virtual ~IsLessConstant() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                assert(varIndex < varNames.size());
+                sstr << "(< " << varNames.at(varIndex) << " " << value << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(varIndex)) < value;
+            }
+
+        private:
+            uint64_t varIndex;
+            uint64_t value;
+        };
+
+        class IsGreaterConstant : public SmtConstraint {
+        public:
+            IsGreaterConstant(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) {
+            }
+
+            virtual ~IsGreaterConstant() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                assert(varIndex < varNames.size());
+                sstr << "(< " << value << " " << varNames.at(varIndex) << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(varIndex)) > value;
+            }
+
+        private:
+            uint64_t varIndex;
+            uint64_t value;
+        };
+
+        class IsLessEqualConstant : public SmtConstraint {
+        public:
+            IsLessEqualConstant(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) {
+            }
+
+            virtual ~IsLessEqualConstant() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                assert(varIndex < varNames.size());
+                sstr << "(<= " << varNames.at(varIndex) << " " << value << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(varIndex)) <= value;
+            }
+
+        private:
+            uint64_t varIndex;
+            uint64_t value;
+        };
+
+        class IsGreaterEqualConstant : public SmtConstraint {
+        public:
+            IsGreaterEqualConstant(uint64_t varIndex, uint64_t val) : varIndex(varIndex), value(val) {
+            }
+
+            virtual ~IsGreaterEqualConstant() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                assert(varIndex < varNames.size());
+                sstr << "(<= " << value << " " << varNames.at(varIndex) << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(varIndex)) >= value;
+            }
+
+        private:
+            uint64_t varIndex;
+            uint64_t value;
+        };
+
+
+        class IsEqual : public SmtConstraint {
+        public:
+            IsEqual(uint64_t varIndex1, uint64_t varIndex2) : var1Index(varIndex1), var2Index(varIndex2) {
+            }
+
+            virtual ~IsEqual() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                return "(= " + varNames.at(var1Index) + " " + varNames.at(var2Index) + ")";
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(var1Index)) ==
+                       manager->getVariableExpression(varNames.at(var2Index));
+            }
+
+        private:
+            uint64_t var1Index;
+            uint64_t var2Index;
+        };
+
+
+        class IsLess : public SmtConstraint {
+        public:
+            IsLess(uint64_t varIndex1, uint64_t varIndex2) : var1Index(varIndex1), var2Index(varIndex2) {
+            }
+
+            virtual ~IsLess() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                return "(< " + varNames.at(var1Index) + " " + varNames.at(var2Index) + ")";
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return manager->getVariableExpression(varNames.at(var1Index)) <
+                       manager->getVariableExpression(varNames.at(var2Index));
+            }
+
+        private:
+            uint64_t var1Index;
+            uint64_t var2Index;
+        };
+
+
+        class PairwiseDifferent : public SmtConstraint {
+        public:
+            PairwiseDifferent(std::vector<uint64_t> const &indices) : varIndices(indices) {
+            }
+
+            virtual ~PairwiseDifferent() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(distinct";
+                //                for(uint64_t i = 0; i < varIndices.size(); ++i) {
+                //                    for(uint64_t j = i + 1; j < varIndices.size(); ++j) {
+                //                        sstr << "()";
+                //                    }
+                //                }
+                for (auto const &varIndex : varIndices) {
+                    sstr << " " << varNames.at(varIndex);
+                }
+                sstr << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> conjuncts;
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    for (uint64_t j = i + 1; j < varIndices.size(); ++j) {
+                        // check all elements pairwise for inequality
+                        conjuncts.push_back(manager->getVariableExpression(varNames.at(varIndices.at(i))) !=
+                                            manager->getVariableExpression(varNames.at(varIndices.at(j))));
+                    }
+                }
+                // take the conjunction of all pairwise inequalities
+                return conjunction(conjuncts);
+            }
+
+        private:
+            std::vector<uint64_t> varIndices;
+        };
+
+
+        class Sorted : public SmtConstraint {
+        public:
+            Sorted(std::vector<uint64_t> varIndices) : varIndices(varIndices) {
+            }
+
+            virtual ~Sorted() {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(and ";
+                for (uint64_t i = 1; i < varIndices.size(); ++i) {
+                    sstr << "(<= " << varNames.at(varIndices.at(i - 1)) << " " << varNames.at(varIndices.at(i)) << ")";
+                }
+                sstr << ") ";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> conjuncts;
+                for (uint64_t i = 1; i < varIndices.size(); ++i) {
+                    conjuncts.push_back(manager->getVariableExpression(varNames.at(varIndices.at(i - 1))) <=
+                                        manager->getVariableExpression(varNames.at(varIndices.at(i))));
+                }
+                // take the conjunction of all pairwise inequalities
+                return conjunction(conjuncts);
+            }
+
+
+        private:
+            std::vector<uint64_t> varIndices;
+        };
+
+
+        class IfThenElse : public SmtConstraint {
+        public:
+            IfThenElse(std::shared_ptr<SmtConstraint> ifC, std::shared_ptr<SmtConstraint> thenC,
+                       std::shared_ptr<SmtConstraint> elseC) : ifConstraint(ifC), thenConstraint(thenC),
+                                                               elseConstraint(elseC) {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(ite " << ifConstraint->toSmtlib2(varNames) << " " << thenConstraint->toSmtlib2(varNames)
+                     << " " << elseConstraint->toSmtlib2(varNames) << ")";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                return ite(ifConstraint->toExpression(varNames, manager),
+                           thenConstraint->toExpression(varNames, manager),
+                           elseConstraint->toExpression(varNames, manager));
+            }
+
+        private:
+            std::shared_ptr<SmtConstraint> ifConstraint;
+            std::shared_ptr<SmtConstraint> thenConstraint;
+            std::shared_ptr<SmtConstraint> elseConstraint;
+        };
+
+        class TrueCountIsLessConstant : public SmtConstraint {
+        public:
+            TrueCountIsLessConstant(std::vector<uint64_t> varIndices, uint64_t val) : varIndices(varIndices),
+                                                                                      value(val) {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(< (+ ";
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    sstr << "(ite " << varNames.at(varIndices.at(i)) << " 1 0 )";
+                }
+                sstr << ") " << value << " )";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> boolToInt;
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    boolToInt.push_back(
+                            ite(manager->getVariableExpression(varNames.at(varIndices.at(i))), // If variable is true
+                                manager->integer(1), // set 1
+                                manager->integer(0))); // else 0
+                }
+                return sum(boolToInt) < manager->integer(value);
+            }
+
+        private:
+            std::vector<uint64_t> varIndices;
+            uint64_t value;
+        };
+
+        class FalseCountIsEqualConstant : public SmtConstraint {
+        public:
+            FalseCountIsEqualConstant(std::vector<uint64_t> varIndices, uint64_t val) : varIndices(varIndices),
+                                                                                        value(val) {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(= (+ ";
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    sstr << "(ite " << varNames.at(varIndices.at(i)) << " 0 1 )";
+                }
+                sstr << ") " << value << " )";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> boolToInt;
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    boolToInt.push_back(
+                            ite(manager->getVariableExpression(varNames.at(varIndices.at(i))), // If variable is true
+                                manager->integer(0), // set 0
+                                manager->integer(1))); // else 1
+                }
+                return sum(boolToInt) == manager->integer(value);
+            }
+
+        private:
+            std::vector<uint64_t> varIndices;
+            uint64_t value;
+        };
+
+        class TrueCountIsConstantValue : public SmtConstraint {
+        public:
+            TrueCountIsConstantValue(std::vector<uint64_t> varIndices, uint64_t val) : varIndices(varIndices),
+                                                                                       value(val) {
+            }
+
+            std::string toSmtlib2(std::vector<std::string> const &varNames) const override {
+                std::stringstream sstr;
+                sstr << "(= (+ ";
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    sstr << "(ite " << varNames.at(varIndices.at(i)) << " 1 0 )";
+                }
+                sstr << ") " << value << " )";
+                return sstr.str();
+            }
+
+            storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                        std::shared_ptr<storm::expressions::ExpressionManager> manager) const override {
+                std::vector<storm::expressions::Expression> boolToInt;
+                for (uint64_t i = 0; i < varIndices.size(); ++i) {
+                    boolToInt.push_back(
+                            ite(manager->getVariableExpression(varNames.at(varIndices.at(i))), // If variable is true
+                                manager->integer(1), // set 1
+                                manager->integer(0))); // else 0
+                }
+                return sum(boolToInt) == manager->integer(value);
+            }
+
+        private:
+            std::vector<uint64_t> varIndices;
+            uint64_t value;
+        };
+    }
+}
+
diff --git a/src/storm-dft/modelchecker/dft/SmtConstraint.h b/src/storm-dft/modelchecker/dft/SmtConstraint.h
new file mode 100644
index 000000000..56c66ca73
--- /dev/null
+++ b/src/storm-dft/modelchecker/dft/SmtConstraint.h
@@ -0,0 +1,43 @@
+#pragma once
+
+#include <memory>
+#include <string>
+#include <vector>
+#include <storm/storage/expressions/Expression.h>
+
+namespace storm {
+    namespace modelchecker {
+        class SmtConstraint {
+        public:
+            virtual ~SmtConstraint() {
+            }
+
+            /** Generate a string describing the constraint in Smtlib2 format
+             *
+             * @param varNames vector of variable names
+             * @return Smtlib2 format string
+             */
+            virtual std::string toSmtlib2(std::vector<std::string> const &varNames) const = 0;
+
+            /** Generate an expression describing the constraint in Storm format
+             *
+             * @param varNames vector of variable names
+             * @param manager the expression manager used to handle the expressions
+             * @return the expression
+             */
+            virtual storm::expressions::Expression toExpression(std::vector<std::string> const &varNames,
+                                                                std::shared_ptr<storm::expressions::ExpressionManager> manager) const = 0;
+
+            virtual std::string description() const {
+                return descript;
+            }
+
+            void setDescription(std::string const &descr) {
+                descript = descr;
+            }
+
+        private:
+            std::string descript;
+        };
+    }
+}
diff --git a/src/storm-dft/settings/modules/DftIOSettings.cpp b/src/storm-dft/settings/modules/DftIOSettings.cpp
index 2c4caeb8e..6a039341a 100644
--- a/src/storm-dft/settings/modules/DftIOSettings.cpp
+++ b/src/storm-dft/settings/modules/DftIOSettings.cpp
@@ -25,6 +25,7 @@ namespace storm {
             const std::string DftIOSettings::minValueOptionName = "min";
             const std::string DftIOSettings::maxValueOptionName = "max";
             const std::string DftIOSettings::exportToJsonOptionName = "export-json";
+            const std::string DftIOSettings::exportToSmtOptionName = "export-smt";
             const std::string DftIOSettings::displayStatsOptionName = "show-dft-stats";
 
 
@@ -56,6 +57,11 @@ namespace storm {
                 this->addOption(storm::settings::OptionBuilder(moduleName, exportToJsonOptionName, false, "Export the model to the Cytoscape JSON format.")
                                         .addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename", "The name of the JSON file to export to.").build())
                                         .build());
+                this->addOption(storm::settings::OptionBuilder(moduleName, exportToSmtOptionName, false,
+                                                               "Export the model as SMT encoding to the smtlib2 format.")
+                                        .addArgument(storm::settings::ArgumentBuilder::createStringArgument("filename",
+                                                                                                            "The name of the smtlib2 file to export to.").build())
+                                        .build());
                 this->addOption(storm::settings::OptionBuilder(moduleName, displayStatsOptionName, false, "Print stats to stdout").build());
             }
 
@@ -122,6 +128,14 @@ namespace storm {
                 return this->getOption(exportToJsonOptionName).getArgumentByName("filename").getValueAsString();
             }
 
+            bool DftIOSettings::isExportToSmt() const {
+                return this->getOption(exportToSmtOptionName).getHasOptionBeenSet();
+            }
+
+            std::string DftIOSettings::getExportSmtFilename() const {
+                return this->getOption(exportToSmtOptionName).getArgumentByName("filename").getValueAsString();
+            }
+
             bool DftIOSettings::isDisplayStatsSet() const {
                 return this->getOption(displayStatsOptionName).getHasOptionBeenSet();
             }
diff --git a/src/storm-dft/settings/modules/DftIOSettings.h b/src/storm-dft/settings/modules/DftIOSettings.h
index d99e6d970..60369b36e 100644
--- a/src/storm-dft/settings/modules/DftIOSettings.h
+++ b/src/storm-dft/settings/modules/DftIOSettings.h
@@ -108,6 +108,13 @@ namespace storm {
                  */
                 bool isExportToJson() const;
 
+                /*!
+                 * Retrieves whether the export to smtlib2 file option was set.
+                 *
+                 * @return True if the export to smtlib2 file option was set.
+                 */
+                bool isExportToSmt() const;
+
                 /*!
                  * Retrieves the name of the json file to export to.
                  *
@@ -115,6 +122,13 @@ namespace storm {
                  */
                 std::string getExportJsonFilename() const;
 
+                /*!
+                 * Retrieves the name of the smtlib2 file to export to.
+                 *
+                 * @return The name of the smtlib2 file to export to.
+                 */
+                std::string getExportSmtFilename() const;
+
                 /*!
                  * Retrieves whether statistics for the DFT should be displayed.
                  *
@@ -142,6 +156,7 @@ namespace storm {
                 static const std::string minValueOptionName;
                 static const std::string maxValueOptionName;
                 static const std::string exportToJsonOptionName;
+                static const std::string exportToSmtOptionName;
                 static const std::string displayStatsOptionName;
 
             };
diff --git a/src/storm-dft/settings/modules/FaultTreeSettings.cpp b/src/storm-dft/settings/modules/FaultTreeSettings.cpp
index 140b0f175..b45c37498 100644
--- a/src/storm-dft/settings/modules/FaultTreeSettings.cpp
+++ b/src/storm-dft/settings/modules/FaultTreeSettings.cpp
@@ -24,24 +24,35 @@ namespace storm {
             const std::string FaultTreeSettings::approximationErrorOptionName = "approximation";
             const std::string FaultTreeSettings::approximationErrorOptionShortName = "approx";
             const std::string FaultTreeSettings::approximationHeuristicOptionName = "approximationheuristic";
+            const std::string FaultTreeSettings::maxDepthOptionName = "maxdepth";
             const std::string FaultTreeSettings::firstDependencyOptionName = "firstdep";
 #ifdef STORM_HAVE_Z3
             const std::string FaultTreeSettings::solveWithSmtOptionName = "smt";
 #endif
 
             FaultTreeSettings::FaultTreeSettings() : ModuleSettings(moduleName) {
-                this->addOption(storm::settings::OptionBuilder(moduleName, symmetryReductionOptionName, false, "Exploit symmetric structure of model.").setShortName(symmetryReductionOptionShortName).build());
+                this->addOption(storm::settings::OptionBuilder(moduleName, symmetryReductionOptionName, false, "Exploit symmetric structure of model.").setShortName(
+                        symmetryReductionOptionShortName).build());
                 this->addOption(storm::settings::OptionBuilder(moduleName, modularisationOptionName, false, "Use modularisation (not applicable for expected time).").build());
                 this->addOption(storm::settings::OptionBuilder(moduleName, disableDCOptionName, false, "Disable Don't Care propagation.").build());
-                this->addOption(storm::settings::OptionBuilder(moduleName, firstDependencyOptionName, false, "Avoid non-determinism by always taking the first possible dependency.").build());
+                this->addOption(storm::settings::OptionBuilder(moduleName, firstDependencyOptionName, false,
+                                                               "Avoid non-determinism by always taking the first possible dependency.").build());
                 this->addOption(storm::settings::OptionBuilder(moduleName, relevantEventsOptionName, false, "Specifies the relevant events from the DFT.")
-                    .addArgument(storm::settings::ArgumentBuilder::createStringArgument("values", "A comma separated list of names of relevant events. 'all' marks all events as relevant, The default '' or 'none' marks only the top level event as relevant.").setDefaultValueString("").build()).build());
+                                        .addArgument(storm::settings::ArgumentBuilder::createStringArgument("values",
+                                                                                                            "A comma separated list of names of relevant events. 'all' marks all events as relevant, The default '' or 'none' marks only the top level event as relevant.").setDefaultValueString(
+                                                "").build()).build());
                 this->addOption(storm::settings::OptionBuilder(moduleName, allowDCRelevantOptionName, false, "Allow Don't Care propagation for relevant events.").build());
-                this->addOption(storm::settings::OptionBuilder(moduleName, approximationErrorOptionName, false, "Approximation error allowed.").setShortName(approximationErrorOptionShortName).addArgument(storm::settings::ArgumentBuilder::createDoubleArgument("error", "The relative approximation error to use.").addValidatorDouble(ArgumentValidatorFactory::createDoubleGreaterEqualValidator(0.0)).build()).build());
+                this->addOption(storm::settings::OptionBuilder(moduleName, approximationErrorOptionName, false, "Approximation error allowed.").setShortName(
+                        approximationErrorOptionShortName).addArgument(
+                        storm::settings::ArgumentBuilder::createDoubleArgument("error", "The relative approximation error to use.").addValidatorDouble(
+                                ArgumentValidatorFactory::createDoubleGreaterEqualValidator(0.0)).build()).build());
                 this->addOption(storm::settings::OptionBuilder(moduleName, approximationHeuristicOptionName, false, "Set the heuristic used for approximation.")
-                    .addArgument(storm::settings::ArgumentBuilder::createStringArgument("heuristic", "The name of the heuristic used for approximation.")
-                    .setDefaultValueString("depth")
-                    .addValidatorString(ArgumentValidatorFactory::createMultipleChoiceValidator({"depth", "probability", "bounddifference"})).build()).build());
+                                        .addArgument(storm::settings::ArgumentBuilder::createStringArgument("heuristic", "The name of the heuristic used for approximation.")
+                                                             .setDefaultValueString("depth")
+                                                             .addValidatorString(ArgumentValidatorFactory::createMultipleChoiceValidator(
+                                                                     {"depth", "probability", "bounddifference"})).build()).build());
+                this->addOption(storm::settings::OptionBuilder(moduleName, maxDepthOptionName, false, "Maximal depth for state space exploration.").addArgument(
+                        storm::settings::ArgumentBuilder::createUnsignedIntegerArgument("depth", "The maximal depth.").build()).build());
 #ifdef STORM_HAVE_Z3
                 this->addOption(storm::settings::OptionBuilder(moduleName, solveWithSmtOptionName, true, "Solve the DFT with SMT.").build());
 #endif
@@ -64,7 +75,8 @@ namespace storm {
             }
 
             bool FaultTreeSettings::areRelevantEventsSet() const {
-                return this->getOption(relevantEventsOptionName).getHasOptionBeenSet() && (this->getOption(relevantEventsOptionName).getArgumentByName("values").getValueAsString() != "");
+                return this->getOption(relevantEventsOptionName).getHasOptionBeenSet() &&
+                       (this->getOption(relevantEventsOptionName).getArgumentByName("values").getValueAsString() != "");
             }
 
             std::vector<std::string> FaultTreeSettings::getRelevantEvents() const {
@@ -91,14 +103,24 @@ namespace storm {
                 STORM_LOG_THROW(false, storm::exceptions::IllegalArgumentValueException, "Illegal value '" << heuristicAsString << "' set as heuristic for approximation.");
             }
 
+            bool FaultTreeSettings::isMaxDepthSet() const {
+                return this->getOption(maxDepthOptionName).getHasOptionBeenSet();
+            }
+
+            uint_fast64_t FaultTreeSettings::getMaxDepth() const {
+                return this->getOption(maxDepthOptionName).getArgumentByName("depth").getValueAsUnsignedInteger();
+            }
+
             bool FaultTreeSettings::isTakeFirstDependency() const {
                 return this->getOption(firstDependencyOptionName).getHasOptionBeenSet();
             }
 
 #ifdef STORM_HAVE_Z3
+
             bool FaultTreeSettings::solveWithSMT() const {
                 return this->getOption(solveWithSmtOptionName).getHasOptionBeenSet();
             }
+
 #endif
 
             void FaultTreeSettings::finalize() {
@@ -107,6 +129,8 @@ namespace storm {
             bool FaultTreeSettings::check() const {
                 // Ensure that disableDC and relevantEvents are not set at the same time
                 STORM_LOG_THROW(!isDisableDC() || !areRelevantEventsSet(), storm::exceptions::InvalidSettingsException, "DisableDC and relevantSets can not both be set.");
+                STORM_LOG_THROW(!isMaxDepthSet() || getApproximationHeuristic() == storm::builder::ApproximationHeuristic::DEPTH, storm::exceptions::InvalidSettingsException,
+                                "Maximal depth requires approximation heuristic depth.");
                 return true;
             }
 
diff --git a/src/storm-dft/settings/modules/FaultTreeSettings.h b/src/storm-dft/settings/modules/FaultTreeSettings.h
index 16a726e04..5ed230bfb 100644
--- a/src/storm-dft/settings/modules/FaultTreeSettings.h
+++ b/src/storm-dft/settings/modules/FaultTreeSettings.h
@@ -82,6 +82,20 @@ namespace storm {
                  */
                 storm::builder::ApproximationHeuristic getApproximationHeuristic() const;
 
+                /*!
+                 * Retrieves whether the option to set a maximal exploration depth is set.
+                 *
+                 * @return True iff the option was set.
+                 */
+                bool isMaxDepthSet() const;
+
+                /*!
+                 * Retrieves the maximal exploration depth.
+                 *
+                 * @return The maximal exploration depth.
+                 */
+                uint_fast64_t getMaxDepth() const;
+
                 /*!
                  * Retrieves whether the non-determinism should be avoided by always taking the first possible dependency.
                  *
@@ -118,6 +132,7 @@ namespace storm {
                 static const std::string approximationErrorOptionName;
                 static const std::string approximationErrorOptionShortName;
                 static const std::string approximationHeuristicOptionName;
+                static const std::string maxDepthOptionName;
                 static const std::string firstDependencyOptionName;
 #ifdef STORM_HAVE_Z3
                 static const std::string solveWithSmtOptionName;
diff --git a/src/storm-dft/storage/dft/DFT.cpp b/src/storm-dft/storage/dft/DFT.cpp
index 0ae6db850..927343ebc 100644
--- a/src/storm-dft/storage/dft/DFT.cpp
+++ b/src/storm-dft/storage/dft/DFT.cpp
@@ -17,14 +17,13 @@ namespace storm {
     namespace storage {
 
         template<typename ValueType>
-        DFT<ValueType>::DFT(DFTElementVector const& elements, DFTElementPointer const& tle) : mElements(elements), mNrOfBEs(0), mNrOfSpares(0), mTopLevelIndex(tle->id()), mMaxSpareChildCount(0) {
+        DFT<ValueType>::DFT(DFTElementVector const& elements, DFTElementPointer const& tle) : mElements(elements), mNrOfBEs(0), mNrOfSpares(0), mNrRepresentatives(0), mTopLevelIndex(tle->id()), mMaxSpareChildCount(0) {
             // Check that ids correspond to indices in the element vector
             STORM_LOG_ASSERT(elementIndicesCorrect(), "Ids incorrect.");
-            size_t nrRepresentatives = 0;
 
             for (auto& elem : mElements) {
                 if (isRepresentative(elem->id())) {
-                    ++nrRepresentatives;
+                    ++mNrRepresentatives;
                 }
                 if(elem->isBasicElement()) {
                     ++mNrOfBEs;
@@ -81,13 +80,12 @@ namespace storm {
 
             //Reserve space for failed spares
             ++mMaxSpareChildCount;
-            size_t usageInfoBits = storm::utility::math::uint64_log2(mMaxSpareChildCount) + 1;
-            mStateVectorSize = nrElements() * 2 + mNrOfSpares * usageInfoBits + nrRepresentatives;
+            mStateVectorSize = DFTStateGenerationInfo::getStateVectorSize(nrElements(), mNrOfSpares, mNrRepresentatives, mMaxSpareChildCount);
         }
 
         template<typename ValueType>
         DFTStateGenerationInfo DFT<ValueType>::buildStateGenerationInfo(storm::storage::DFTIndependentSymmetries const& symmetries) const {
-            DFTStateGenerationInfo generationInfo(nrElements(), mMaxSpareChildCount);
+            DFTStateGenerationInfo generationInfo(nrElements(), mNrOfSpares, mNrRepresentatives, mMaxSpareChildCount);
             
             // Generate Pre and Post info for restrictions, and mutexes
             for(auto const& elem : mElements) {
@@ -199,7 +197,8 @@ namespace storm {
             STORM_LOG_TRACE(generationInfo);
             STORM_LOG_ASSERT(stateIndex == mStateVectorSize, "Id incorrect.");
             STORM_LOG_ASSERT(visited.full(), "Not all elements considered.");
-            
+            generationInfo.checkSymmetries();
+
             return generationInfo;
         }
         
diff --git a/src/storm-dft/storage/dft/DFT.h b/src/storm-dft/storage/dft/DFT.h
index 5c885ef59..53f901408 100644
--- a/src/storm-dft/storage/dft/DFT.h
+++ b/src/storm-dft/storage/dft/DFT.h
@@ -58,6 +58,7 @@ namespace storm {
             DFTElementVector mElements;
             size_t mNrOfBEs;
             size_t mNrOfSpares;
+            size_t mNrRepresentatives;
             size_t mTopLevelIndex;
             size_t mStateVectorSize;
             size_t mMaxSpareChildCount;
diff --git a/src/storm-dft/storage/dft/DFTState.cpp b/src/storm-dft/storage/dft/DFTState.cpp
index be66bc2a0..86cd79400 100644
--- a/src/storm-dft/storage/dft/DFTState.cpp
+++ b/src/storm-dft/storage/dft/DFTState.cpp
@@ -522,6 +522,8 @@ namespace storm {
                 do {
                     tmp = 0;
                     for (size_t i = 1; i < n; ++i) {
+                        STORM_LOG_ASSERT(symmetryIndices[i-1] + length <= mStatus.size(), "Symmetry index "<< symmetryIndices[i-1] << " + length " << length << " is larger than status vector " << mStatus.size());
+                        STORM_LOG_ASSERT(symmetryIndices[i] + length <= mStatus.size(), "Symmetry index "<< symmetryIndices[i] << " + length " << length << " is larger than status vector " << mStatus.size());
                         if (mStatus.compareAndSwap(symmetryIndices[i-1], symmetryIndices[i], length)) {
                             tmp = i;
                             changed = true;
diff --git a/src/storm-dft/storage/dft/DFTStateGenerationInfo.h b/src/storm-dft/storage/dft/DFTStateGenerationInfo.h
index ec2ed993a..eb345d0a0 100644
--- a/src/storm-dft/storage/dft/DFTStateGenerationInfo.h
+++ b/src/storm-dft/storage/dft/DFTStateGenerationInfo.h
@@ -5,36 +5,59 @@ namespace storm {
         class DFTStateGenerationInfo {
         private:
             const size_t mUsageInfoBits;
+            const size_t stateIndexSize;
             std::map<size_t, size_t> mSpareUsageIndex; // id spare -> index first bit in state
             std::map<size_t, size_t> mSpareActivationIndex; // id spare representative -> index in state
             std::vector<size_t> mIdToStateIndex; // id -> index first bit in state
             std::map<size_t, std::vector<size_t>> mSeqRestrictionPreElements; // id -> list of restriction pre elements
             std::map<size_t, std::vector<size_t>> mSeqRestrictionPostElements; // id -> list of restriction post elements
-            std::map<size_t, std::vector<size_t>> mMutexRestrictionElements; // id -> list of elments in the same mutexes
+            std::map<size_t, std::vector<size_t>> mMutexRestrictionElements; // id -> list of elements in the same mutexes
             std::vector<std::pair<size_t, std::vector<size_t>>> mSymmetries; // pair (length of symmetry group, vector indicating the starting points of the symmetry groups)
 
         public:
 
-            DFTStateGenerationInfo(size_t nrElements, size_t maxSpareChildCount) :
-                mUsageInfoBits(storm::utility::math::uint64_log2(maxSpareChildCount) + 1), 
-                mIdToStateIndex(nrElements)
-            {
+            DFTStateGenerationInfo(size_t nrElements, size_t nrOfSpares, size_t nrRepresentatives, size_t maxSpareChildCount) :
+                    mUsageInfoBits(getUsageInfoBits(maxSpareChildCount)),
+                    stateIndexSize(getStateVectorSize(nrElements, nrOfSpares, nrRepresentatives, maxSpareChildCount)),
+                    mIdToStateIndex(nrElements) {
                 STORM_LOG_ASSERT(maxSpareChildCount < pow(2, mUsageInfoBits), "Bit length incorrect.");
             }
 
+            /*!
+             * Get number of bits required to store claiming information for spares in binary format.
+             * @param maxSpareChildCount Maximal number of children of a spare.
+             * @return Number of bits required to store claiming information.
+             */
+            static size_t getUsageInfoBits(size_t maxSpareChildCount) {
+                return storm::utility::math::uint64_log2(maxSpareChildCount) + 1;
+            }
+
+            /*!
+             * Get length of BitVector capturing DFT state.
+             * @param nrElements Number of DFT elements.
+             * @param nrOfSpares Number of Spares (needed for claiming).
+             * @param nrRepresentatives Number of representatives (needed for activation).
+             * @param maxSpareChildCount Maximal number of children of a spare.
+             * @return Length of required BitVector.
+             */
+            static size_t getStateVectorSize(size_t nrElements, size_t nrOfSpares, size_t nrRepresentatives, size_t maxSpareChildCount) {
+                return nrElements * 2 + nrOfSpares * getUsageInfoBits(maxSpareChildCount) + nrRepresentatives;
+            }
+
             size_t usageInfoBits() const {
                 return mUsageInfoBits;
             }
 
             void addStateIndex(size_t id, size_t index) {
                 STORM_LOG_ASSERT(id < mIdToStateIndex.size(), "Id invalid.");
+                STORM_LOG_ASSERT(index < stateIndexSize, "Index invalid.");
                 mIdToStateIndex[id] = index;
             }
 
             void setRestrictionPreElements(size_t id, std::vector<size_t> const& elems) {
                 mSeqRestrictionPreElements[id] = elems;
             }
-            
+
             void setRestrictionPostElements(size_t id, std::vector<size_t> const& elems) {
                 mSeqRestrictionPostElements[id] = elems;
             }
@@ -47,7 +70,7 @@ namespace storm {
                 STORM_LOG_ASSERT(mSeqRestrictionPreElements.count(index) > 0, "Index invalid.");
                 return mSeqRestrictionPreElements.at(index);
             }
-            
+
             std::vector<size_t> const& seqRestrictionPostElements(size_t index) const {
                 STORM_LOG_ASSERT(mSeqRestrictionPostElements.count(index) > 0, "Index invalid.");
                 return mSeqRestrictionPostElements.at(index);
@@ -57,12 +80,14 @@ namespace storm {
                 STORM_LOG_ASSERT(mMutexRestrictionElements.count(index) > 0, "Index invalid.");
                 return mMutexRestrictionElements.at(index);
             }
-            
+
             void addSpareActivationIndex(size_t id, size_t index) {
+                STORM_LOG_ASSERT(index < stateIndexSize, "Index invalid.");
                 mSpareActivationIndex[id] = index;
             }
 
             void addSpareUsageIndex(size_t id, size_t index) {
+                STORM_LOG_ASSERT(index < stateIndexSize, "Index invalid.");
                 mSpareUsageIndex[id] = index;
             }
 
@@ -84,7 +109,7 @@ namespace storm {
             void addSymmetry(size_t length, std::vector<size_t>& startingIndices) {
                 mSymmetries.push_back(std::make_pair(length, startingIndices));
             }
-            
+
             /**
              * Generate more symmetries by combining two symmetries
              */
@@ -94,21 +119,25 @@ namespace storm {
                     size_t childStart = mSymmetries[i].second[0];
                     size_t childLength = mSymmetries[i].first;
                     // Iterate over possible parents
-                    for (size_t j = i+1; j < mSymmetries.size(); ++j) {
+                    for (size_t j = i + 1; j < mSymmetries.size(); ++j) {
                         size_t parentStart = mSymmetries[j].second[0];
                         size_t parentLength = mSymmetries[j].first;
                         // Check if child lies in parent
                         if (parentStart <= childStart && childStart + childLength < parentStart + parentLength) {
+                            // We add the symmetry of the child to all symmetric elements in the parent
                             std::vector<std::vector<size_t>> newSymmetries;
+                            // Start iteration at 1, because symmetry for child at 0 is already included
                             for (size_t index = 1; index < mSymmetries[j].second.size(); ++index) {
-                                // Get symmetric start by applying the bijection
                                 std::vector<size_t> newStarts;
+                                // Apply child symmetry to all symmetric elements of parent
                                 for (size_t symmetryStarts : mSymmetries[i].second) {
-                                    newStarts.push_back(symmetryStarts + mSymmetries[j].second[index]);
+                                    // Get symmetric element by applying the bijection
+                                    size_t symmetryOffset = symmetryStarts - parentStart;
+                                    newStarts.push_back(mSymmetries[j].second[index] + symmetryOffset);
                                 }
                                 newSymmetries.push_back(newStarts);
                             }
-                            // Insert after child
+                            // Insert new symmetry after child
                             for (size_t index = 0; index < newSymmetries.size(); ++index) {
                                 mSymmetries.insert(mSymmetries.begin() + i + 1 + index, std::make_pair(childLength, newSymmetries[index]));
                             }
@@ -119,10 +148,21 @@ namespace storm {
                 }
             }
 
+            void checkSymmetries() {
+                for (auto pair : mSymmetries) {
+                    STORM_LOG_ASSERT(pair.first > 0, "Empty symmetry.");
+                    STORM_LOG_ASSERT(pair.first < stateIndexSize, "Symmetry too long.");
+                    for (size_t index : pair.second) {
+                        STORM_LOG_ASSERT(index < stateIndexSize, "Symmetry starting point " << index << " invalid.");
+                        STORM_LOG_ASSERT(index + pair.first < stateIndexSize, "Symmetry ending point " << index << " invalid.");
+                    }
+                }
+            }
+
             size_t getSymmetrySize() const {
                 return mSymmetries.size();
             }
-            
+
             bool hasSymmetries() const {
                 return !mSymmetries.empty();
             }
@@ -138,6 +178,8 @@ namespace storm {
             }
 
             friend std::ostream& operator<<(std::ostream& os, DFTStateGenerationInfo const& info) {
+                os << "StateGenerationInfo:" << std::endl;
+                os << "Length of state vector: " << info.stateIndexSize << std::endl;
                 os << "Id to state index:" << std::endl;
                 for (size_t id = 0; id < info.mIdToStateIndex.size(); ++id) {
                     os << id << " -> " << info.getStateIndex(id) << std::endl;
diff --git a/src/storm-pars/modelchecker/region/SparseDtmcParameterLiftingModelChecker.cpp b/src/storm-pars/modelchecker/region/SparseDtmcParameterLiftingModelChecker.cpp
index a199b3781..1b938f717 100644
--- a/src/storm-pars/modelchecker/region/SparseDtmcParameterLiftingModelChecker.cpp
+++ b/src/storm-pars/modelchecker/region/SparseDtmcParameterLiftingModelChecker.cpp
@@ -149,8 +149,8 @@ namespace storm {
             lowerResultBound = storm::utility::zero<ConstantType>();
             upperResultBound = storm::utility::one<ConstantType>();
             
-            // The solution of the min-max equation system will always be unique (assuming graph-preserving instantiations).
-            auto req = solverFactory->getRequirements(env, true, boost::none, true);
+            // The solution of the min-max equation system will always be unique (assuming graph-preserving instantiations, every induced DTMC has the same graph structure).
+            auto req = solverFactory->getRequirements(env, true, true, boost::none, true);
             req.clearBounds();
             STORM_LOG_THROW(!req.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + req.getEnabledRequirementsAsString() + " not checked.");
             solverFactory->setRequirementsChecked(true);
@@ -188,8 +188,8 @@ namespace storm {
             // We only know a lower bound for the result
             lowerResultBound = storm::utility::zero<ConstantType>();
         
-            // The solution of the min-max equation system will always be unique (assuming graph-preserving instantiations).
-            auto req = solverFactory->getRequirements(env, true, boost::none, true);
+            // The solution of the min-max equation system will always be unique (assuming graph-preserving instantiations, every induced DTMC has the same graph structure).
+            auto req = solverFactory->getRequirements(env, true, true, boost::none, true);
             req.clearLowerBounds();
             if (req.upperBounds()) {
                 solvingRequiresUpperRewardBounds = true;
@@ -258,6 +258,7 @@ namespace storm {
             } else {
                 auto solver = solverFactory->create(env, parameterLifter->getMatrix());
                 solver->setHasUniqueSolution();
+                solver->setHasNoEndComponents();
                 if (lowerResultBound) solver->setLowerBound(lowerResultBound.get());
                 if (upperResultBound) {
                     solver->setUpperBound(upperResultBound.get());
diff --git a/src/storm/builder/DdJaniModelBuilder.cpp b/src/storm/builder/DdJaniModelBuilder.cpp
index 699787ee8..27e1e586e 100644
--- a/src/storm/builder/DdJaniModelBuilder.cpp
+++ b/src/storm/builder/DdJaniModelBuilder.cpp
@@ -1975,13 +1975,13 @@ namespace storm {
             std::vector<storm::expressions::Variable> rewardVariables;
             if (options.isBuildAllRewardModelsSet()) {
                 for (auto const& rewExpr : model.getAllRewardModelExpressions()) {
-                    STORM_LOG_ERROR_COND(rewExpr.second.isVariable(), "The DD-builder can not build the non-trivial reward expression '" << rewExpr.second << "'.");
+                    STORM_LOG_THROW(!model.isNonTrivialRewardModelExpression(rewExpr.first), storm::exceptions::NotSupportedException, "The DD-builder can not build the non-trivial reward expression '" << rewExpr.second << "'.");
                     rewardVariables.push_back(rewExpr.second.getBaseExpression().asVariableExpression().getVariable());
                 }
             } else {
                 for (auto const& rewardModelName : options.getRewardModelNames()) {
+                    STORM_LOG_THROW(!model.isNonTrivialRewardModelExpression(rewardModelName), storm::exceptions::NotSupportedException, "The DD-builder can not build the non-trivial reward expression '" << rewardModelName << "'.");
                     auto const& rewExpr = model.getRewardModelExpression(rewardModelName);
-                    STORM_LOG_ERROR_COND(rewExpr.isVariable(), "The DD-builder can not build the non-trivial reward expression '" << rewExpr << "'.");
                     rewardVariables.push_back(rewExpr.getBaseExpression().asVariableExpression().getVariable());
                 }
             }
@@ -2070,6 +2070,7 @@ namespace storm {
             
             // Lift the transient edge destinations. We can do so, as we know that there are no assignment levels (because that's not supported anyway).
             if (preparedModel.hasTransientEdgeDestinationAssignments()) {
+                // This operation is correct as we are asserting that there are no assignment levels and no non-trivial reward expressions.
                 preparedModel.liftTransientEdgeDestinationAssignments();
             }
             
diff --git a/src/storm/generator/JaniNextStateGenerator.cpp b/src/storm/generator/JaniNextStateGenerator.cpp
index 185ebfa44..4f12d8d32 100644
--- a/src/storm/generator/JaniNextStateGenerator.cpp
+++ b/src/storm/generator/JaniNextStateGenerator.cpp
@@ -40,7 +40,7 @@ namespace storm {
         }
         
         template<typename ValueType, typename StateType>
-        JaniNextStateGenerator<ValueType, StateType>::JaniNextStateGenerator(storm::jani::Model const& model, NextStateGeneratorOptions const& options, bool) : NextStateGenerator<ValueType, StateType>(model.getExpressionManager(), options), model(model), rewardExpressions(), hasStateActionRewards(false) {
+        JaniNextStateGenerator<ValueType, StateType>::JaniNextStateGenerator(storm::jani::Model const& model, NextStateGeneratorOptions const& options, bool) : NextStateGenerator<ValueType, StateType>(model.getExpressionManager(), options), model(model), rewardExpressions(), hasStateActionRewards(false), evaluateRewardExpressionsAtEdges(false), evaluateRewardExpressionsAtDestinations(false) {
             STORM_LOG_THROW(!this->options.isBuildChoiceLabelsSet(), storm::exceptions::InvalidSettingsException, "JANI next-state generator cannot generate choice labels.");
 
             auto features = this->model.getModelFeatures();
@@ -54,11 +54,26 @@ namespace storm {
             }
             STORM_LOG_THROW(features.empty(), storm::exceptions::InvalidSettingsException, "The explicit next-state generator does not support the following model feature(s): " << features.toString() << ".");
 
-            // Preprocess the edge assignments:
-            if (this->model.usesAssignmentLevels()) {
+            // Get the reward expressions to be built. Also find out whether there is a non-trivial one.
+            bool hasNonTrivialRewardExpressions = false;
+            if (this->options.isBuildAllRewardModelsSet()) {
+                rewardExpressions = this->model.getAllRewardModelExpressions();
+                hasNonTrivialRewardExpressions = this->model.hasNonTrivialRewardExpression();
+            } else {
+                // Extract the reward models from the model based on the names we were given.
+                for (auto const& rewardModelName : this->options.getRewardModelNames()) {
+                    rewardExpressions.emplace_back(rewardModelName, this->model.getRewardModelExpression(rewardModelName));
+                    hasNonTrivialRewardExpressions = hasNonTrivialRewardExpressions || this->model.isNonTrivialRewardModelExpression(rewardModelName);
+                }
+            }
+            
+            // We try to lift the edge destination assignments to the edges as this reduces the number of evaluator calls.
+            // However, this will only be helpful if there are no assignment levels and only trivial reward expressions.
+            if (hasNonTrivialRewardExpressions || this->model.usesAssignmentLevels()) {
                 this->model.pushEdgeAssignmentsToDestinations();
             } else {
                 this->model.liftTransientEdgeDestinationAssignments(storm::jani::AssignmentLevelFinder().getLowestAssignmentLevel(this->model));
+                evaluateRewardExpressionsAtEdges = true;
             }
             
             // Create all synchronization-related information, e.g. the automata that are put in parallel.
@@ -71,18 +86,10 @@ namespace storm {
             this->transientVariableInformation = TransientVariableInformation<ValueType>(this->model, this->parallelAutomata);
             this->transientVariableInformation.registerArrayVariableReplacements(arrayEliminatorData);
             
-            // Create a proper evalator.
+            // Create a proper evaluator.
             this->evaluator = std::make_unique<storm::expressions::ExpressionEvaluator<ValueType>>(this->model.getManager());
             this->transientVariableInformation.setDefaultValuesInEvaluator(*this->evaluator);
             
-            if (this->options.isBuildAllRewardModelsSet()) {
-                rewardExpressions = this->model.getAllRewardModelExpressions();
-            } else {
-                // Extract the reward models from the model based on the names we were given.
-                for (auto const& rewardModelName : this->options.getRewardModelNames()) {
-                    rewardExpressions.emplace_back(rewardModelName, this->model.getRewardModelExpression(rewardModelName));
-                }
-            }
             
             // Build the information structs for the reward models.
             buildRewardModelInformation();
@@ -541,18 +548,19 @@ namespace storm {
             }
             
             Choice<ValueType> choice(edge.getActionIndex(), static_cast<bool>(exitRate));
+            std::vector<ValueType> stateActionRewards;
             
             // Perform the transient edge assignments and create the state action rewards
             TransientVariableValuation<ValueType> transientVariableValuation;
-            if (!edge.getAssignments().empty()) {
+            if (!evaluateRewardExpressionsAtEdges || edge.getAssignments().empty()) {
+                stateActionRewards.resize(rewardModelInformation.size(), storm::utility::zero<ValueType>());
+            } else {
                 for (int64_t assignmentLevel = edge.getAssignments().getLowestLevel(true); assignmentLevel <= edge.getAssignments().getHighestLevel(true); ++assignmentLevel) {
                     transientVariableValuation.clear();
                     applyTransientUpdate(transientVariableValuation, edge.getAssignments().getTransientAssignments(assignmentLevel), *this->evaluator);
                     transientVariableValuation.setInEvaluator(*this->evaluator, this->getOptions().isExplorationChecksSet());
                 }
-            }
-            std::vector<ValueType> stateActionRewards = evaluateRewardExpressions();
-            if (!edge.getAssignments().empty()) {
+                stateActionRewards = evaluateRewardExpressions();
                 transientVariableInformation.setDefaultValuesInEvaluator(*this->evaluator);
             }
             
@@ -591,8 +599,11 @@ namespace storm {
                             }
                         }
                     }
-                    
-                    addEvaluatedRewardExpressions(stateActionRewards, probability);
+                    if (evaluateRewardExpressionsAtDestinations) {
+                        unpackStateIntoEvaluator(newState, this->variableInformation, *this->evaluator);
+                        evaluatorChanged = true;
+                        addEvaluatedRewardExpressions(stateActionRewards, probability);
+                    }
                     
                     if (evaluatorChanged) {
                         // Restore the old variable valuation
@@ -650,7 +661,7 @@ namespace storm {
             
             // Perform the edge assignments (if there are any)
             TransientVariableValuation<ValueType> transientVariableValuation;
-            if (lowestEdgeAssignmentLevel <= highestEdgeAssignmentLevel) {
+            if (evaluateRewardExpressionsAtEdges && lowestEdgeAssignmentLevel <= highestEdgeAssignmentLevel) {
                 for (int64_t assignmentLevel = lowestEdgeAssignmentLevel; assignmentLevel <= highestEdgeAssignmentLevel; ++assignmentLevel) {
                     transientVariableValuation.clear();
                     for (uint_fast64_t i = 0; i < iteratorList.size(); ++i) {
@@ -718,7 +729,11 @@ namespace storm {
                         evaluatorChanged = true;
                         transientVariableValuation.setInEvaluator(*this->evaluator, this->getOptions().isExplorationChecksSet());
                     }
-                    addEvaluatedRewardExpressions(stateActionRewards, successorProbability);
+                    if (evaluateRewardExpressionsAtDestinations) {
+                        unpackStateIntoEvaluator(successorState, this->variableInformation, *this->evaluator);
+                        evaluatorChanged = true;
+                        addEvaluatedRewardExpressions(stateActionRewards, successorProbability);
+                    }
                     if (evaluatorChanged) {
                         // Restore the old state information
                         unpackStateIntoEvaluator(state, this->variableInformation, *this->evaluator);
@@ -977,11 +992,18 @@ namespace storm {
                 storm::jani::RewardModelInformation info(this->model, rewardModel.second);
                 rewardModelInformation.emplace_back(rewardModel.first, info.hasStateRewards(), false, false);
                 STORM_LOG_THROW(this->options.isScaleAndLiftTransitionRewardsSet() || !info.hasTransitionRewards(), storm::exceptions::NotSupportedException, "Transition rewards are not supported and a reduction to action-based rewards was not possible.");
+                if (info.hasTransitionRewards()) {
+                    evaluateRewardExpressionsAtDestinations = true;
+                }
                 if (info.hasActionRewards() || (this->options.isScaleAndLiftTransitionRewardsSet() && info.hasTransitionRewards())) {
                     hasStateActionRewards = true;
                     rewardModelInformation.back().setHasStateActionRewards();
                 }
             }
+            if (!hasStateActionRewards) {
+                evaluateRewardExpressionsAtDestinations = false;
+                evaluateRewardExpressionsAtEdges = false;
+            }
         }
         
         template<typename ValueType, typename StateType>
diff --git a/src/storm/generator/JaniNextStateGenerator.h b/src/storm/generator/JaniNextStateGenerator.h
index 350429944..a5bcb0450 100644
--- a/src/storm/generator/JaniNextStateGenerator.h
+++ b/src/storm/generator/JaniNextStateGenerator.h
@@ -167,6 +167,12 @@ namespace storm {
             /// A flag that stores whether at least one of the selected reward models has state-action rewards.
             bool hasStateActionRewards;
             
+            /// A flag that stores whether we shall evaluate reward expressions at edges
+            bool evaluateRewardExpressionsAtEdges;
+            
+            /// A flag that stores whether we shall evaluate reward expressions at edge destinations
+            bool evaluateRewardExpressionsAtDestinations;
+            
             /// Data from eliminated array expressions. These are required to keep references to array variables in LValues alive.
             storm::jani::ArrayEliminatorData arrayEliminatorData;
             
diff --git a/src/storm/logic/EventuallyFormula.cpp b/src/storm/logic/EventuallyFormula.cpp
index c8ed335e5..12a77d49b 100644
--- a/src/storm/logic/EventuallyFormula.cpp
+++ b/src/storm/logic/EventuallyFormula.cpp
@@ -57,10 +57,10 @@ namespace storm {
                 
         std::ostream& EventuallyFormula::writeToStream(std::ostream& out) const {
             out << "F ";
-            this->getSubformula().writeToStream(out);
             if (hasRewardAccumulation()) {
                 out << "[" << getRewardAccumulation() << "]";
             }
+            this->getSubformula().writeToStream(out);
             return out;
         }
     }
diff --git a/src/storm/modelchecker/csl/helper/SparseMarkovAutomatonCslHelper.cpp b/src/storm/modelchecker/csl/helper/SparseMarkovAutomatonCslHelper.cpp
index f813564c2..b1d8ffea9 100644
--- a/src/storm/modelchecker/csl/helper/SparseMarkovAutomatonCslHelper.cpp
+++ b/src/storm/modelchecker/csl/helper/SparseMarkovAutomatonCslHelper.cpp
@@ -310,13 +310,14 @@ namespace storm {
 
                     // Create solver.
                     storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
-                    storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, true, dir);
+                    storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, true, true, dir);
                     requirements.clearBounds();
                     STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
                     
                     if (numberOfProbabilisticChoices > 0) {
                         solver = minMaxLinearEquationSolverFactory.create(env, probMatrix);
                         solver->setHasUniqueSolution();
+                        solver->setHasNoEndComponents();
                         solver->setBounds(storm::utility::zero<ValueType>(), storm::utility::one<ValueType>());
                         solver->setRequirementsChecked();
                         solver->setCachingEnabled(true);
@@ -486,14 +487,15 @@ namespace storm {
                 }
                 
                 // Check for requirements of the solver.
-                // The solution is unique as we assume non-zeno MAs.
+                // The min-max system has no end components as we assume non-zeno MAs.
                 storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
-                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, true, dir);
+                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, true, true, dir);
                 requirements.clearBounds();
                 STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
                 
                 std::unique_ptr<storm::solver::MinMaxLinearEquationSolver<ValueType>> solver = minMaxLinearEquationSolverFactory.create(env, aProbabilistic);
                 solver->setHasUniqueSolution();
+                solver->setHasNoEndComponents();
                 solver->setBounds(storm::utility::zero<ValueType>(), storm::utility::one<ValueType>());
                 solver->setRequirementsChecked();
                 solver->setCachingEnabled(true);
@@ -819,12 +821,13 @@ namespace storm {
                 
                 // Check for requirements of the solver.
                 storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
-                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(underlyingSolverEnvironment, true, dir);
+                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(underlyingSolverEnvironment, true, true, dir);
                 requirements.clearBounds();
                 STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
 
                 std::unique_ptr<storm::solver::MinMaxLinearEquationSolver<ValueType>> solver = minMaxLinearEquationSolverFactory.create(underlyingSolverEnvironment, sspMatrix);
                 solver->setHasUniqueSolution();
+                solver->setHasNoEndComponents();
                 solver->setLowerBound(storm::utility::zero<ValueType>());
                 solver->setUpperBound(*std::max_element(lraValuesForEndComponents.begin(), lraValuesForEndComponents.end()));
                 solver->setRequirementsChecked();
@@ -1053,13 +1056,14 @@ namespace storm {
                     // Check for requirements of the solver.
                     // The solution is unique as we assume non-zeno MAs.
                     storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
-                    storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, true, dir);
+                    storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, true, true, dir);
                     requirements.clearLowerBounds();
                     STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
     
                     solver = minMaxLinearEquationSolverFactory.create(env, std::move(aProbabilistic));
                     solver->setLowerBound(storm::utility::zero<ValueType>());
                     solver->setHasUniqueSolution(true);
+                    solver->setHasNoEndComponents(true);
                     solver->setRequirementsChecked(true);
                     solver->setCachingEnabled(true);
                 }
diff --git a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.cpp b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.cpp
index fee459534..03789b0c3 100644
--- a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.cpp
+++ b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.cpp
@@ -9,6 +9,7 @@
 #include "storm/settings/modules/MultiObjectiveSettings.h"
 #include "storm/storage/SparseMatrix.h"
 #include "storm/storage/MaximalEndComponentDecomposition.h"
+#include "storm/storage/Scheduler.h"
 #include "storm/utility/graph.h"
 #include "storm/utility/solver.h"
 
@@ -155,7 +156,7 @@ namespace storm {
                 
                 std::vector<Point> foundPoints;
                 std::vector<Polytope> infeasableAreas;
-                checkRecursive(polytopeTree, eps, foundPoints, infeasableAreas, 0);
+                checkRecursive(env, polytopeTree, eps, foundPoints, infeasableAreas, 0);
                 
                 swCheck.stop();
                 std::cout << " done!" << std::endl;
@@ -202,91 +203,144 @@ namespace storm {
                 return processed;
             }
             
+            template <typename ValueType>
+            std::map<uint64_t, storm::expressions::Expression> processEc(storm::storage::MaximalEndComponent const& ec, storm::storage::SparseMatrix<ValueType> const& transitions, std::string const& varNameSuffix, std::vector<storm::expressions::Expression> const& choiceVars, storm::solver::LpSolver<ValueType>& lpModel) {
+                std::map<uint64_t, storm::expressions::Expression> ecStateVars, ecChoiceVars, ecFlowChoiceVars;
+                
+                // Compute an upper bound on the expected number of visits of the states in this ec.
+                // First get a lower bound l on the probability of a path that leaves this MEC. 1-l is an upper bound on Pr_s(X F s).
+                // The desired upper bound is thus 1/(1-(1-l)) = 1/l. See Baier et al., CAV'17
+                ValueType expVisitsUpperBound = storm::utility::one<ValueType>();
+                uint64_t numStates = 0;
+                for (auto const& stateChoices : ec) {
+                    ++numStates;
+                    ValueType minProb = storm::utility::one<ValueType>();
+                    for (auto const& choice : stateChoices.second) {
+                        for (auto const& transition : transitions.getRow(choice)) {
+                            if (!storm::utility::isZero(transition.getValue())) {
+                                minProb = std::min(minProb, transition.getValue());
+                            }
+                        }
+                    }
+                    expVisitsUpperBound *= minProb;
+                }
+                expVisitsUpperBound = storm::utility::one<ValueType>() / expVisitsUpperBound;
+                std::cout << "expVisits upper bound is " << expVisitsUpperBound  << "." << std::endl;
+                // create variables
+                for (auto const& stateChoices : ec) {
+                    ecStateVars.emplace(stateChoices.first, lpModel.addBoundedIntegerVariable("e" + std::to_string(stateChoices.first) + varNameSuffix, storm::utility::zero<ValueType>(), storm::utility::one<ValueType>()).getExpression());
+                    for (auto const& choice : stateChoices.second) {
+                        ecChoiceVars.emplace(choice, lpModel.addBoundedIntegerVariable("ec" + std::to_string(choice) + varNameSuffix, storm::utility::zero<ValueType>(), storm::utility::one<ValueType>()).getExpression());
+                        ecFlowChoiceVars.emplace(choice, lpModel.addBoundedContinuousVariable("f" + std::to_string(choice) + varNameSuffix, storm::utility::zero<ValueType>(), expVisitsUpperBound).getExpression());
+                    }
+                }
+                
+                // create constraints
+                std::map<uint64_t, std::vector<storm::expressions::Expression>> ins, outs;
+                for (auto const& stateChoices : ec) {
+                    std::vector<storm::expressions::Expression> ecChoiceVarsAtState;
+                    std::vector<storm::expressions::Expression> out;
+                    for (auto const& choice : stateChoices.second) {
+                        if (choiceVars[choice].isInitialized()) {
+                            lpModel.addConstraint("", ecChoiceVars[choice] <= choiceVars[choice]);
+                            lpModel.addConstraint("", ecFlowChoiceVars[choice] <= lpModel.getConstant(expVisitsUpperBound) * choiceVars[choice]);
+                        }
+                        ecChoiceVarsAtState.push_back(ecChoiceVars[choice]);
+                        out.push_back(ecFlowChoiceVars[choice]);
+                        for (auto const& transition : transitions.getRow(choice)) {
+                            if (!storm::utility::isZero(transition.getValue())) {
+                                lpModel.addConstraint("", ecChoiceVars[choice] <= ecStateVars[transition.getColumn()]);
+                                ins[transition.getColumn()].push_back(lpModel.getConstant(transition.getValue()) * ecFlowChoiceVars[choice]);
+                            }
+                        }
+                    }
+                    lpModel.addConstraint("", ecStateVars[stateChoices.first] == storm::expressions::sum(ecChoiceVarsAtState));
+                    out.push_back(lpModel.getConstant(expVisitsUpperBound) * ecStateVars[stateChoices.first]);
+                    // Iterate over choices that leave the ec
+                    for (uint64_t choice = transitions.getRowGroupIndices()[stateChoices.first]; choice < transitions.getRowGroupIndices()[stateChoices.first + 1]; ++choice) {
+                        if (stateChoices.second.find(choice) == stateChoices.second.end()) {
+                            assert(choiceVars[choice].isInitialized());
+                            out.push_back(lpModel.getConstant(expVisitsUpperBound) * choiceVars[choice]);
+                        }
+                    }
+                    outs.emplace(stateChoices.first, out);
+                }
+                for (auto const& stateChoices : ec) {
+                    auto in = ins.find(stateChoices.first);
+                    STORM_LOG_ASSERT(in != ins.end(), "ec state does not seem to have an incoming transition.");
+                    // Assume a uniform initial distribution
+                    in->second.push_back(lpModel.getConstant(storm::utility::one<ValueType>() / storm::utility::convertNumber<ValueType>(numStates)));
+                    auto out = outs.find(stateChoices.first);
+                    STORM_LOG_ASSERT(out != outs.end(), "out flow of ec state was not set.");
+                    lpModel.addConstraint("", storm::expressions::sum(in->second)<= storm::expressions::sum(out->second));
+                }
+
+                return ecStateVars;
+        }
+            
             template <typename ModelType, typename GeometryValueType>
-            std::vector<std::vector<storm::expressions::Variable>> DeterministicSchedsLpChecker<ModelType, GeometryValueType>::createEcVariables(std::vector<storm::expressions::Expression> const& choiceVars) {
-                auto one = lpModel->getConstant(storm::utility::one<ValueType>());
-                std::vector<std::vector<storm::expressions::Variable>> result(model.getNumberOfStates());
-                storm::storage::MaximalEndComponentDecomposition<ValueType> mecs(model);
+            std::vector<std::vector<storm::expressions::Expression>> DeterministicSchedsLpChecker<ModelType, GeometryValueType>::createEcVariables() {
+                std::vector<std::vector<storm::expressions::Expression>> result(objectiveHelper.size(), std::vector<storm::expressions::Expression>(model.getNumberOfStates()));
                 uint64_t ecCounter = 0;
+                auto backwardTransitions = model.getBackwardTransitions();
                 
+                // Get the choices that do not induce a value (i.e. reward) for all objectives
+                storm::storage::BitVector choicesWithValueZero(model.getNumberOfChoices(), true);
+                for (auto const& objHelper : objectiveHelper) {
+                    for (auto const& value : objHelper.getChoiceValueOffsets()) {
+                        STORM_LOG_ASSERT(!storm::utility::isZero(value.second), "Expected non-zero choice-value offset.");
+                        choicesWithValueZero.set(value.first, false);
+                    }
+                }
+                storm::storage::MaximalEndComponentDecomposition<ValueType> mecs(model.getTransitionMatrix(), backwardTransitions, storm::storage::BitVector(model.getNumberOfStates(), true), choicesWithValueZero);
+
                 for (auto const& mec : mecs) {
-                    // Create a submatrix for the current mec as well as a mapping to map back to the original states.
-                    storm::storage::BitVector mecStatesAsBitVector(model.getNumberOfStates(), false);
-                    storm::storage::BitVector mecChoicesAsBitVector(model.getNumberOfChoices(), false);
-                    for (auto const& stateChoices : mec) {
-                        mecStatesAsBitVector.set(stateChoices.first, true);
-                        for (auto const& choice : stateChoices.second) {
-                            mecChoicesAsBitVector.set(choice, true);
+                    std::map<std::set<uint64_t>, std::vector<uint64_t>> excludedStatesToObjIndex;
+                    for (uint64_t objIndex = 0; objIndex < objectiveHelper.size(); ++objIndex) {
+                        std::set<uint64_t> excludedStates;
+                        for (auto const& stateChoices : mec) {
+                            auto schedIndValueIt = objectiveHelper[objIndex].getSchedulerIndependentStateValues().find(stateChoices.first);
+                            if (schedIndValueIt != objectiveHelper[objIndex].getSchedulerIndependentStateValues().end() && !storm::utility::isZero(schedIndValueIt->second)) {
+                                excludedStates.insert(stateChoices.first);
+                            }
                         }
+                        excludedStatesToObjIndex[excludedStates].push_back(objIndex);
                     }
-                    std::vector<uint64_t> toGlobalStateIndexMapping(mecStatesAsBitVector.begin(), mecStatesAsBitVector.end());
-                    std::vector<uint64_t> toGlobalChoiceIndexMapping(mecChoicesAsBitVector.begin(), mecChoicesAsBitVector.end());
-                    //std::cout << "mec choices of ec" << ecCounter << ": " << mecChoicesAsBitVector << std::endl;
-                    storm::storage::SparseMatrix<ValueType> mecTransitions = model.getTransitionMatrix().getSubmatrix(false, mecChoicesAsBitVector, mecStatesAsBitVector);
                     
-                    // Create a variable for each subEC and add it for the corresponding states.
-                    // Also assert that not every state takes an ec choice.
-                    auto subEcs = getSubEndComponents(mecTransitions);
-                    for (auto const& subEc : subEcs) {
-                        // get the choices of the current EC with some non-zero value (i.e. reward).
-                        // std::cout << "sub ec choices of ec" << ecCounter << ": " << subEc.second << std::endl;
-                        storm::storage::BitVector subEcChoicesWithValueZero = subEc.second;
-                        for (auto const& localSubEcChoiceIndex : subEc.second) {
-                            uint64_t subEcChoice = toGlobalChoiceIndexMapping[localSubEcChoiceIndex];
-                            for (auto const& objHelper : objectiveHelper) {
-                                if (objHelper.getChoiceValueOffsets().count(subEcChoice) > 0) {
-                                    STORM_LOG_ASSERT(!storm::utility::isZero(objHelper.getChoiceValueOffsets().at(subEcChoice)), "Expected non-zero choice-value offset.");
-                                    subEcChoicesWithValueZero.set(localSubEcChoiceIndex, false);
-                                    break;
+                    for (auto const& exclStates : excludedStatesToObjIndex) {
+                        if (exclStates.first.empty()) {
+                            auto ecVars = processEc(mec, model.getTransitionMatrix(), "", choiceVariables, *lpModel);
+                            ++ecCounter;
+                            for (auto const& stateVar : ecVars) {
+                                for (auto const& objIndex : exclStates.second) {
+                                    result[objIndex][stateVar.first] = stateVar.second;
                                 }
                             }
-                        }
-                        
-                        // Check whether each state has at least one zero-valued choice
-                        bool zeroValueSubEc = true;
-                        for (auto const& state : subEc.first) {
-                            if (subEcChoicesWithValueZero.getNextSetIndex(mecTransitions.getRowGroupIndices()[state]) >= mecTransitions.getRowGroupIndices()[state + 1]) {
-                                zeroValueSubEc = false;
-                                break;
-                            }
-                        }
-                        
-                        if (zeroValueSubEc) {
-                            // Create a variable that is one iff upon entering this subEC no more choice value is collected.
-                            auto ecVar = lpModel->addBoundedIntegerVariable("ec" + std::to_string(ecCounter++), storm::utility::zero<ValueType>(), storm::utility::one<ValueType>());
-                            // assign this variable to every state in the ec
-                            for (auto const& localSubEcStateIndex : subEc.first) {
-                                uint64_t subEcState = toGlobalStateIndexMapping[localSubEcStateIndex];
-                                result[subEcState].push_back(ecVar);
-                            }
-                            // Create the sum over all choice vars that induce zero choice value
-                            std::vector<storm::expressions::Expression> ecChoiceVars;
-                            uint64_t numSubEcStatesWithMultipleChoices = subEc.first.getNumberOfSetBits();
-                            for (auto const& localSubEcChoiceIndex : subEcChoicesWithValueZero) {
-                                uint64_t subEcChoice = toGlobalChoiceIndexMapping[localSubEcChoiceIndex];
-                                if (choiceVars[subEcChoice].isInitialized()) {
-                                    ecChoiceVars.push_back(choiceVars[subEcChoice]);
-                                } else {
-                                    // If there is no choiceVariable, it means that this corresponds to a state with just one choice.
-                                    assert(numSubEcStatesWithMultipleChoices > 0);
-                                    --numSubEcStatesWithMultipleChoices;
+                        } else {
+                            // Compute sub-end components
+                            storm::storage::BitVector subEcStates(model.getNumberOfStates(), false), subEcChoices(model.getNumberOfChoices(), false);
+                            for (auto const& stateChoices : mec) {
+                                if (exclStates.first.count(stateChoices.first) == 0) {
+                                    subEcStates.set(stateChoices.first, true);
+                                    for (auto const& choice : stateChoices.second) {
+                                        subEcChoices.set(choice, true);
+                                    }
                                 }
                             }
-                            // Assert that the ecVar is one iff the sum over the zero-value-choice variables equals the number of states in this ec
-                            storm::expressions::Expression ecVarBound = one - lpModel->getConstant(storm::utility::convertNumber<ValueType>(numSubEcStatesWithMultipleChoices)).simplify();
-                            if (!ecChoiceVars.empty()) {
-                                ecVarBound = ecVarBound + storm::expressions::sum(ecChoiceVars);
-                            }
-                            if (inOutEncoding()) {
-                                lpModel->addConstraint("", ecVar <= ecVarBound);
-                            } else {
-                                lpModel->addConstraint("", ecVar >= ecVarBound);
+                            storm::storage::MaximalEndComponentDecomposition<ValueType> subEcs(model.getTransitionMatrix(), backwardTransitions, subEcStates, subEcChoices);
+                            for (auto const& subEc : subEcs) {
+                                auto ecVars = processEc(subEc, model.getTransitionMatrix(), "o" + std::to_string(*exclStates.second.begin()), choiceVariables, *lpModel);
+                                ++ecCounter;
+                                for (auto const& stateVar : ecVars) {
+                                    for (auto const& objIndex : exclStates.second) {
+                                        result[objIndex][stateVar.first] = stateVar.second;
+                                    }
+                                }
                             }
                         }
                     }
                 }
-                
-                STORM_LOG_TRACE("Found " << ecCounter << " end components.");
+                std::cout << "found " << ecCounter << "many ECs" << std::endl;
                 return result;
             }
             
@@ -306,12 +360,11 @@ namespace storm {
                 auto one = lpModel->getConstant(storm::utility::one<ValueType>());
                 auto const& groups = model.getTransitionMatrix().getRowGroupIndices();
                 // Create choice variables.
-                std::vector<storm::expressions::Expression> choiceVars;
-                choiceVars.reserve(model.getNumberOfChoices());
+                choiceVariables.reserve(model.getNumberOfChoices());
                 for (uint64_t state = 0; state < numStates; ++state) {
                     uint64_t numChoices = model.getNumberOfChoices(state);
                     if (numChoices == 1) {
-                        choiceVars.emplace_back();
+                        choiceVariables.emplace_back();
                     } else {
                         std::vector<storm::expressions::Expression> localChoices;
                         if (choiceVarReduction()) {
@@ -319,23 +372,28 @@ namespace storm {
                         }
                         for (uint64_t choice = 0; choice < numChoices; ++choice) {
                             localChoices.push_back(lpModel->addBoundedIntegerVariable("c" + std::to_string(state) + "_" + std::to_string(choice), 0, 1).getExpression());
-                            choiceVars.push_back(localChoices.back());
+                            choiceVariables.push_back(localChoices.back());
                         }
                             storm::expressions::Expression localChoicesSum = storm::expressions::sum(localChoices);
                         if (choiceVarReduction()) {
                             lpModel->addConstraint("", localChoicesSum <= one);
-                            choiceVars.push_back(one - localChoicesSum);
+                            choiceVariables.push_back(one - localChoicesSum);
                         } else {
                             lpModel->addConstraint("", localChoicesSum == one);
                         }
                     }
                 }
-                // Create ec Variables and assert for each sub-ec that not all choice variables stay there
-                auto ecVars = createEcVariables(choiceVars);
+                // Create ec Variables for each state/objective
+                auto ecVars = createEcVariables();
                 bool hasEndComponents = false;
-                for (auto const& stateEcVars : ecVars) {
-                    if (!stateEcVars.empty()) {
-                        hasEndComponents = true;
+                for (auto const& objEcVars : ecVars) {
+                    for (auto const& ecVar : objEcVars) {
+                        if (ecVar.isInitialized()) {
+                            hasEndComponents = true;
+                            break;
+                        }
+                    }
+                    if (hasEndComponents) {
                         break;
                     }
                 }
@@ -365,7 +423,7 @@ namespace storm {
                         for (uint64_t globalChoice = groups[state]; globalChoice < groups[state + 1]; ++globalChoice) {
                             choiceValVars[globalChoice] = lpModel->addBoundedContinuousVariable("y" + std::to_string(globalChoice), storm::utility::zero<ValueType>(), visitingTimesUpperBounds[state]).getExpression();
                             if (model.getNumberOfChoices(state) > 1) {;
-                                lpModel->addConstraint("", choiceValVars[globalChoice] <= lpModel->getConstant(visitingTimesUpperBounds[state]) * choiceVars[globalChoice]);
+                                lpModel->addConstraint("", choiceValVars[globalChoice] <= lpModel->getConstant(visitingTimesUpperBounds[state]) * choiceVariables[globalChoice]);
                             }
                         }
                     }
@@ -373,13 +431,10 @@ namespace storm {
                     std::vector<storm::expressions::Expression> ecValVars(model.getNumberOfStates());
                     if (hasEndComponents) {
                         for (auto const& state : nonBottomStates) {
-                            if (!ecVars[state].empty()) {
+                            // For the in-out-encoding, all objectives have the same ECs. Hence, we only care for the variables of the first objective.
+                            if (ecVars.front()[state].isInitialized()) {
                                 ecValVars[state] = lpModel->addBoundedContinuousVariable("z" + std::to_string(state), storm::utility::zero<ValueType>(), visitingTimesUpperBounds[state]).getExpression();
-                                std::vector<storm::expressions::Expression> ecValueSum;
-                                for (auto const& ecVar : ecVars[state]) {
-                                    ecValueSum.push_back(lpModel->getConstant(visitingTimesUpperBounds[state]) * ecVar.getExpression());
-                                }
-                                lpModel->addConstraint("", ecValVars[state] <= storm::expressions::sum(ecValueSum));
+                                lpModel->addConstraint("", ecValVars[state] <= lpModel->getConstant(visitingTimesUpperBounds[state]) * ecVars.front()[state]);
                             }
                         }
                     }
@@ -439,7 +494,6 @@ namespace storm {
                         auto const& schedulerIndependentStates = objectiveHelper[objIndex].getSchedulerIndependentStateValues();
                         // Create state variables and store variables of ecs which contain a state with a scheduler independent value
                         std::vector<storm::expressions::Expression> stateVars;
-                        std::set<storm::expressions::Variable> ecVarsWithValue;
                         stateVars.reserve(numStates);
                         for (uint64_t state = 0; state < numStates; ++state) {
                             auto valIt = schedulerIndependentStates.find(state);
@@ -455,11 +509,6 @@ namespace storm {
                                     value = -value;
                                 }
                                 stateVars.push_back(lpModel->getConstant(value));
-                                if (hasEndComponents) {
-                                    for (auto const& ecVar : ecVars[state]) {
-                                        ecVarsWithValue.insert(ecVar);
-                                    }
-                                }
                             }
                             if (state == initialState) {
                                 initialStateResults.push_back(stateVars.back());
@@ -500,7 +549,7 @@ namespace storm {
                                 } else {
                                     uint64_t globalChoiceIndex = groups[state] + choice;
                                     if (isMaxDiffEncoding()) {
-                                        storm::expressions::Expression maxDiff = upperValueBoundAtState * (one - choiceVars[globalChoiceIndex]);
+                                        storm::expressions::Expression maxDiff = upperValueBoundAtState * (one - choiceVariables[globalChoiceIndex]);
                                         if (objectiveHelper[objIndex].minimizing()) {
                                             lpModel->addConstraint("", stateVars[state] >= choiceValue - maxDiff);
                                         } else {
@@ -517,14 +566,14 @@ namespace storm {
                                     if (objectiveHelper[objIndex].minimizing()) {
                                         if (isMinNegativeEncoding()) {
                                             lpModel->addConstraint("", choiceValVar <= choiceValue);
-                                            lpModel->addConstraint("", choiceValVar <= -upperValueBoundAtState * (one - choiceVars[globalChoiceIndex]));
+                                            lpModel->addConstraint("", choiceValVar <= -upperValueBoundAtState * (one - choiceVariables[globalChoiceIndex]));
                                         } else {
-                                            lpModel->addConstraint("", choiceValVar + (upperValueBoundAtState * (one - choiceVars[globalChoiceIndex])) >= choiceValue);
+                                            lpModel->addConstraint("", choiceValVar + (upperValueBoundAtState * (one - choiceVariables[globalChoiceIndex])) >= choiceValue);
                                             // Optional: lpModel->addConstraint("", choiceValVar <= choiceValue);
                                         }
                                     } else {
                                         lpModel->addConstraint("", choiceValVar <= choiceValue);
-                                        lpModel->addConstraint("", choiceValVar <= upperValueBoundAtState * choiceVars[globalChoiceIndex]);
+                                        lpModel->addConstraint("", choiceValVar <= upperValueBoundAtState * choiceVariables[globalChoiceIndex]);
                                     }
                                     if (choice == 0) {
                                         stateValue = choiceValVar;
@@ -543,34 +592,31 @@ namespace storm {
                             } else {
                                 lpModel->addConstraint("", stateVars[state] <= stateValue);
                             }
-                            if (numChoices > 1) {
-                                for (auto const& ecVar : ecVars[state]) {
-                                    if (ecVarsWithValue.count(ecVar) == 0) {
-                                        // if this ec is taken, make sure to assign a value of zero
-                                        if (objectiveHelper[objIndex].minimizing()) {
-                                            // TODO: these are optional
-                                            if (isMinNegativeEncoding()) {
-                                                lpModel->addConstraint("", stateVars[state] >= (ecVar.getExpression() - one) * lpModel->getConstant(objectiveHelper[objIndex].getUpperValueBoundAtState(env, state)));
-                                            } else {
-                                                lpModel->addConstraint("", stateVars[state] <= (one - ecVar.getExpression()) * lpModel->getConstant(objectiveHelper[objIndex].getUpperValueBoundAtState(env, state)));
-                                            }
+                            if (numChoices > 1 && hasEndComponents) {
+                                auto& ecVar = ecVars[objIndex][state];
+                                if (ecVar.isInitialized()) {
+                                    // if this state is part of an ec, make sure to assign a value of zero.
+                                    if (objectiveHelper[objIndex].minimizing()) {
+                                        // TODO: these are optional
+                                        if (isMinNegativeEncoding()) {
+                                            lpModel->addConstraint("", stateVars[state] >= (ecVar - one) * lpModel->getConstant(objectiveHelper[objIndex].getUpperValueBoundAtState(env, state)));
                                         } else {
-                                            lpModel->addConstraint("", stateVars[state] <= (one - ecVar.getExpression()) * lpModel->getConstant(objectiveHelper[objIndex].getUpperValueBoundAtState(env, state)));
+                                            lpModel->addConstraint("", stateVars[state] <= (one - ecVar) * lpModel->getConstant(objectiveHelper[objIndex].getUpperValueBoundAtState(env, state)));
                                         }
+                                    } else {
+                                        lpModel->addConstraint("", stateVars[state] <= (one - ecVar) * lpModel->getConstant(objectiveHelper[objIndex].getUpperValueBoundAtState(env, state)));
                                     }
                                 }
                             }
                         }
                     }
                 }
-                swAux.start();
                 lpModel->update();
-                swAux.stop();
                 STORM_LOG_INFO("Done initializing LP model.");
             }
             
             template <typename ModelType, typename GeometryValueType>
-            void DeterministicSchedsLpChecker<ModelType, GeometryValueType>::checkRecursive(storm::storage::geometry::PolytopeTree <GeometryValueType>& polytopeTree, GeometryValueType const& eps, std::vector<Point>& foundPoints, std::vector<Polytope>& infeasableAreas, uint64_t const& depth) {
+            void DeterministicSchedsLpChecker<ModelType, GeometryValueType>::checkRecursive(Environment const& env, storm::storage::geometry::PolytopeTree <GeometryValueType>& polytopeTree, GeometryValueType const& eps, std::vector<Point>& foundPoints, std::vector<Polytope>& infeasableAreas, uint64_t const& depth) {
                 std::cout << ".";
                 std::cout.flush();
                 STORM_LOG_ASSERT(!polytopeTree.isEmpty(), "Tree node is empty");
@@ -602,6 +648,8 @@ namespace storm {
                         polytopeTree.clear();
                     } else {
                         STORM_LOG_ASSERT(!lpModel->isUnbounded(), "LP result is unbounded.");
+                        // TODO: only for debugging
+                        validateCurrentModel(env);
                         Point newPoint;
                         for (auto const& objVar : currentObjectiveVariables) {
                             newPoint.push_back(storm::utility::convertNumber<GeometryValueType>(lpModel->getContinuousValue(objVar)));
@@ -642,14 +690,14 @@ namespace storm {
                         }
                         swAux.stop();
                         if (!polytopeTree.isEmpty()) {
-                            checkRecursive(polytopeTree, eps, foundPoints, infeasableAreas, depth);
+                            checkRecursive(env, polytopeTree, eps, foundPoints, infeasableAreas, depth);
                         }
                     }
                 } else {
                     // Traverse all the children.
                     for (uint64_t childId = 0; childId < polytopeTree.getChildren().size(); ++childId) {
                         uint64_t newPointIndex = foundPoints.size();
-                        checkRecursive(polytopeTree.getChildren()[childId], eps, foundPoints, infeasableAreas, depth + 1);
+                        checkRecursive(env, polytopeTree.getChildren()[childId], eps, foundPoints, infeasableAreas, depth + 1);
                         STORM_LOG_ASSERT(polytopeTree.getChildren()[childId].isEmpty(), "expected empty children.");
                         // Make the new points known to the right siblings
                         for (; newPointIndex < foundPoints.size(); ++newPointIndex) {
@@ -669,6 +717,41 @@ namespace storm {
                 swLpBuild.stop();
             }
             
+            template <typename ModelType, typename GeometryValueType>
+            void DeterministicSchedsLpChecker<ModelType, GeometryValueType>::validateCurrentModel(Environment const& env) const {
+                storm::storage::Scheduler<ValueType> scheduler(model.getNumberOfStates());
+                for (uint64_t state = 0; state < model.getNumberOfStates(); ++state) {
+                    uint64_t numChoices = model.getNumberOfChoices(state);
+                    if (numChoices == 1) {
+                        scheduler.setChoice(0, state);
+                    } else {
+                        uint64_t globalChoiceOffset = model.getTransitionMatrix().getRowGroupIndices()[state];
+                        bool choiceFound = false;
+                        for (uint64_t localChoice = 0; localChoice < numChoices; ++localChoice) {
+                            if (lpModel->getIntegerValue(choiceVariables[globalChoiceOffset + localChoice].getBaseExpression().asVariableExpression().getVariable()) == 1) {
+                                STORM_LOG_THROW(!choiceFound, storm::exceptions::UnexpectedException, "Multiple choices selected at state " << state);
+                                scheduler.setChoice(localChoice, state);
+                                choiceFound = true;
+                            }
+                        }
+                        STORM_LOG_THROW(choiceFound, storm::exceptions::UnexpectedException, "No choice selected at state " << state);
+                    }
+                }
+                auto inducedModel = model.applyScheduler(scheduler)->template as<ModelType>();
+                for (uint64_t objIndex = 0; objIndex < objectiveHelper.size(); ++objIndex) {
+                    ValueType expectedValue = lpModel->getContinuousValue(currentObjectiveVariables[objIndex]);
+                    if (objectiveHelper[objIndex].minimizing()) {
+                        expectedValue = -expectedValue;
+                    }
+                    ValueType actualValue = objectiveHelper[objIndex].evaluateOnModel(env, *inducedModel);
+                    std::cout << "obj" << objIndex << ": LpSolver: " << storm::utility::convertNumber<double>(expectedValue) << " (" << expectedValue << ")" << std::endl;
+                    std::cout << "obj" << objIndex << ": model checker: " << storm::utility::convertNumber<double>(actualValue) << " (" << actualValue << ")" << std::endl;
+                    STORM_LOG_THROW(storm::utility::convertNumber<double>(storm::utility::abs<ValueType>(actualValue - expectedValue)) <= 1e-4, storm::exceptions::UnexpectedException, "Invalid value for objective " << objIndex << ": expected " << expectedValue << " but got " << actualValue);
+                }
+                std::cout << std::endl;
+                
+            }
+            
             template class DeterministicSchedsLpChecker<storm::models::sparse::Mdp<double>, storm::RationalNumber>;
             template class DeterministicSchedsLpChecker<storm::models::sparse::Mdp<storm::RationalNumber>, storm::RationalNumber>;
             template class DeterministicSchedsLpChecker<storm::models::sparse::MarkovAutomaton<double>, storm::RationalNumber>;
diff --git a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.h b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.h
index e95980ff8..aa04ad926 100644
--- a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.h
+++ b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsLpChecker.h
@@ -46,18 +46,20 @@ namespace storm {
                 std::pair<std::vector<Point>, std::vector<Polytope>> check(storm::Environment const& env, storm::storage::geometry::PolytopeTree<GeometryValueType>& polytopeTree, GeometryValueType const& eps);
 
             private:
-                std::vector<std::vector<storm::expressions::Variable>> createEcVariables(std::vector<storm::expressions::Expression> const& choiceVars);
+                std::vector<std::vector<storm::expressions::Expression>> createEcVariables();
                 void initializeLpModel(Environment const& env);
-
-                
                 
-                void checkRecursive(storm::storage::geometry::PolytopeTree<GeometryValueType>& polytopeTree, GeometryValueType const& eps, std::vector<Point>& foundPoints, std::vector<Polytope>& infeasableAreas, uint64_t const& depth);
+                // Builds the induced markov chain of the current model and checks whether the resulting value coincide with the result of the lp solver.
+                void validateCurrentModel(Environment const& env) const;
+
+                void checkRecursive(storm::Environment const& env, storm::storage::geometry::PolytopeTree<GeometryValueType>& polytopeTree, GeometryValueType const& eps, std::vector<Point>& foundPoints, std::vector<Polytope>& infeasableAreas, uint64_t const& depth);
                 
                 ModelType const& model;
                 std::vector<DeterministicSchedsObjectiveHelper<ModelType>> const& objectiveHelper;
 
                 std::unique_ptr<storm::solver::LpSolver<ValueType>> lpModel;
                 storm::solver::GurobiLpSolver<ValueType>* gurobiLpModel;
+                std::vector<storm::expressions::Expression> choiceVariables;
                 std::vector<storm::expressions::Expression> initialStateResults;
                 std::vector<storm::expressions::Variable> currentObjectiveVariables;
                 std::vector<GeometryValueType> currentWeightVector;
diff --git a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.cpp b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.cpp
index aa1a6dd7e..7e1dd6489 100644
--- a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.cpp
+++ b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.cpp
@@ -333,6 +333,10 @@ namespace storm {
                 return visitingTimesUpperBounds;
             }
             
+            template <typename ModelType>
+            typename ModelType::ValueType DeterministicSchedsObjectiveHelper<ModelType>::evaluateOnModel(Environment const& env, ModelType const& evaluatedModel) const {
+                return evaluateOperatorFormula(env, evaluatedModel, *objective.formula)[*evaluatedModel.getInitialStates().begin()];
+            }
 
             template class DeterministicSchedsObjectiveHelper<storm::models::sparse::Mdp<double>>;
             template class DeterministicSchedsObjectiveHelper<storm::models::sparse::Mdp<storm::RationalNumber>>;
diff --git a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.h b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.h
index 4250c4ebe..e50664e40 100644
--- a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.h
+++ b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsObjectiveHelper.h
@@ -44,6 +44,8 @@ namespace storm {
                  */
                 bool isTotalRewardObjective() const;
                 
+                ValueType evaluateOnModel(Environment const& env, ModelType const& evaluatedModel) const;
+                
                 static std::vector<ValueType> computeUpperBoundOnExpectedVisitingTimes(storm::storage::SparseMatrix<ValueType> const& modelTransitions, storm::storage::BitVector const& bottomStates, storm::storage::BitVector const& nonBottomStates, bool hasEndComponents);
 
             private:
diff --git a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsParetoExplorer.cpp b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsParetoExplorer.cpp
index dda3b2c0f..a1e05f377 100644
--- a/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsParetoExplorer.cpp
+++ b/src/storm/modelchecker/multiobjective/deterministicScheds/DeterministicSchedsParetoExplorer.cpp
@@ -426,7 +426,7 @@ namespace storm {
                             f.addPoint(p.first, p.second);
                         }
                     }
-                    STORM_LOG_ASSERT(std::count(f.getHalfspace().normalVector().begin(), f.getHalfspace().normalVector().end(), storm::utility::zero<GeometryValueType>()) + f.getNumberOfPoints() == objectives.size(), "Unexpected number of points on facet.");
+                    STORM_LOG_ASSERT(std::count(f.getHalfspace().normalVector().begin(), f.getHalfspace().normalVector().end(), storm::utility::zero<GeometryValueType>()) + f.getNumberOfPoints() >= objectives.size(), "Not enough points on facet.");
                     
                     unprocessedFacets.push(std::move(f));
                 }
diff --git a/src/storm/modelchecker/multiobjective/pcaa/RewardBoundedMdpPcaaWeightVectorChecker.cpp b/src/storm/modelchecker/multiobjective/pcaa/RewardBoundedMdpPcaaWeightVectorChecker.cpp
index 8f1d00f6c..548eb1390 100644
--- a/src/storm/modelchecker/multiobjective/pcaa/RewardBoundedMdpPcaaWeightVectorChecker.cpp
+++ b/src/storm/modelchecker/multiobjective/pcaa/RewardBoundedMdpPcaaWeightVectorChecker.cpp
@@ -320,6 +320,7 @@ namespace storm {
                     storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxSolverFactory;
                     cachedData.minMaxSolver = minMaxSolverFactory.create(env, epochModel.epochMatrix);
                     cachedData.minMaxSolver->setHasUniqueSolution();
+                    cachedData.minMaxSolver->setHasNoEndComponents();
                     cachedData.minMaxSolver->setTrackScheduler(true);
                     cachedData.minMaxSolver->setCachingEnabled(true);
                     auto req = cachedData.minMaxSolver->getRequirements(env);
diff --git a/src/storm/modelchecker/multiobjective/pcaa/StandardMaPcaaWeightVectorChecker.cpp b/src/storm/modelchecker/multiobjective/pcaa/StandardMaPcaaWeightVectorChecker.cpp
index f5980166c..72052768a 100644
--- a/src/storm/modelchecker/multiobjective/pcaa/StandardMaPcaaWeightVectorChecker.cpp
+++ b/src/storm/modelchecker/multiobjective/pcaa/StandardMaPcaaWeightVectorChecker.cpp
@@ -301,6 +301,7 @@ namespace storm {
                 storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxSolverFactory;
                 result->solver = minMaxSolverFactory.create(env, PS.toPS);
                 result->solver->setHasUniqueSolution(true);
+                result->solver->setHasNoEndComponents(true); // Non-zeno MA
                 result->solver->setTrackScheduler(true);
                 result->solver->setCachingEnabled(true);
                 auto req = result->solver->getRequirements(env, storm::solver::OptimizationDirection::Maximize, false);
diff --git a/src/storm/modelchecker/multiobjective/pcaa/StandardPcaaWeightVectorChecker.cpp b/src/storm/modelchecker/multiobjective/pcaa/StandardPcaaWeightVectorChecker.cpp
index 23b256306..307d91c3f 100644
--- a/src/storm/modelchecker/multiobjective/pcaa/StandardPcaaWeightVectorChecker.cpp
+++ b/src/storm/modelchecker/multiobjective/pcaa/StandardPcaaWeightVectorChecker.cpp
@@ -163,6 +163,50 @@ namespace storm {
                 return result;
             }
             
+            template <typename ValueType>
+            std::vector<uint64_t> computeValidInitialScheduler(storm::storage::SparseMatrix<ValueType> const& matrix, storm::storage::BitVector const& rowsWithSumLessOne) {
+                std::vector<uint64_t> result(matrix.getRowGroupCount());
+                auto const& groups = matrix.getRowGroupIndices();
+                auto backwardsTransitions = matrix.transpose(true);
+                storm::storage::BitVector processedStates(result.size(), false);
+                for (uint64_t state = 0; state < result.size(); ++state) {
+                    if (rowsWithSumLessOne.getNextSetIndex(groups[state]) < groups[state + 1]) {
+                        result[state] = rowsWithSumLessOne.getNextSetIndex(groups[state]) - groups[state];
+                        processedStates.set(state, true);
+                    }
+                }
+                std::vector<uint64_t> stack(processedStates.begin(), processedStates.end());
+                while (!stack.empty()) {
+                    uint64_t current = stack.back();
+                    stack.pop_back();
+                    STORM_LOG_ASSERT(processedStates.get(current), "states on the stack shall be processed.");
+                    for (auto const& entry : backwardsTransitions.getRow(current)) {
+                        uint64_t pred = entry.getColumn();
+                        if (!processedStates.get(pred)) {
+                            // Find a choice that leads to a processed state
+                            uint64_t predChoice = groups[pred];
+                            bool foundSuccessor = false;
+                            for (; predChoice < groups[pred + 1]; ++predChoice) {
+                                for (auto const& predEntry : matrix.getRow(predChoice)) {
+                                    if (processedStates.get(predEntry.getColumn())) {
+                                        foundSuccessor = true;
+                                        break;
+                                    }
+                                }
+                                if (foundSuccessor) {
+                                    break;
+                                }
+                            }
+                            STORM_LOG_ASSERT(foundSuccessor && predChoice < groups[pred + 1], "Predecessor of a processed state should have a processed successor");
+                            result[pred] = predChoice - groups[pred];
+                            processedStates.set(pred, true);
+                            stack.push_back(pred);
+                        }
+                    }
+                }
+                return result;
+            }
+            
             template <class SparseModelType>
             void StandardPcaaWeightVectorChecker<SparseModelType>::unboundedWeightedPhase(Environment const& env, std::vector<ValueType> const& weightedRewardVector, std::vector<ValueType> const& weightVector) {
                 
@@ -189,6 +233,10 @@ namespace storm {
                 if (solver->hasUpperBound()) {
                     req.clearUpperBounds();
                 }
+                if (req.validInitialScheduler()) {
+                    solver->setInitialScheduler(computeValidInitialScheduler(ecQuotient->matrix, ecQuotient->rowsWithSumLessOne));
+                    req.clearValidInitialScheduler();
+                }
                 STORM_LOG_THROW(!req.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + req.getEnabledRequirementsAsString() + " not checked.");
                 solver->setRequirementsChecked(true);
                 
diff --git a/src/storm/modelchecker/prctl/helper/HybridMdpPrctlHelper.cpp b/src/storm/modelchecker/prctl/helper/HybridMdpPrctlHelper.cpp
index 7cb054574..d1755ef38 100644
--- a/src/storm/modelchecker/prctl/helper/HybridMdpPrctlHelper.cpp
+++ b/src/storm/modelchecker/prctl/helper/HybridMdpPrctlHelper.cpp
@@ -146,22 +146,23 @@ namespace storm {
                 } else {
                     // If there are maybe states, we need to solve an equation system.
                     if (!maybeStates.isZero()) {
-                        // If we minimize, we know that the solution to the equation system is unique.
-                        bool uniqueSolution = dir == storm::solver::OptimizationDirection::Minimize;
+                        // If we minimize, we know that the solution to the equation system has no end components
+                        bool hasNoEndComponents = dir == storm::solver::OptimizationDirection::Minimize;
                         // Check for requirements of the solver early so we can adjust the maybe state computation accordingly.
                         storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> linearEquationSolverFactory;
-                        storm::solver::MinMaxLinearEquationSolverRequirements requirements = linearEquationSolverFactory.getRequirements(env, uniqueSolution, dir);
+                        storm::solver::MinMaxLinearEquationSolverRequirements requirements = linearEquationSolverFactory.getRequirements(env, hasNoEndComponents, hasNoEndComponents, dir);
                         storm::solver::MinMaxLinearEquationSolverRequirements clearedRequirements = requirements;
                         SolverRequirementsData<ValueType> solverRequirementsData;
                         bool extendMaybeStates = false;
                         
                         if (clearedRequirements.hasEnabledRequirement()) {
-                            if (clearedRequirements.noEndComponents()) {
-                                STORM_LOG_DEBUG("Scheduling EC elimination, because the solver requires it.");
+                            if (clearedRequirements.uniqueSolution()) {
+                                STORM_LOG_DEBUG("Scheduling EC elimination, because the solver requires a unique solution.");
                                 extendMaybeStates = true;
-                                clearedRequirements.clearNoEndComponents();
+                                clearedRequirements.clearUniqueSolution();
+                                hasNoEndComponents = true;
                             }
-                            if (clearedRequirements.validInitialScheduler()) {
+                            if (clearedRequirements.validInitialScheduler() && !hasNoEndComponents) {
                                 STORM_LOG_DEBUG("Scheduling valid scheduler computation, because the solver requires it.");
                                 clearedRequirements.clearValidInitialScheduler();
                             }
@@ -210,8 +211,6 @@ namespace storm {
                             // Eliminate the end components and remove the states that are not interesting (target or non-filter).
                             eliminateEndComponentsAndExtendedStatesUntilProbabilities(explicitRepresentation, solverRequirementsData, targetStates);
                             
-                            // The solution becomes unique after end components have been eliminated.
-                            uniqueSolution = true;
                         } else {
                             // Then compute the vector that contains the one-step probabilities to a state with probability 1 for all
                             // maybe states.
@@ -240,8 +239,9 @@ namespace storm {
                         
                         std::unique_ptr<storm::solver::MinMaxLinearEquationSolver<ValueType>> solver = linearEquationSolverFactory.create(env, std::move(explicitRepresentation.first));
                         
-                        // Set whether the equation system will have a unique solution
-                        solver->setHasUniqueSolution(uniqueSolution);
+                        // Set whether the equation system will have a unique solution / no end components
+                        solver->setHasUniqueSolution(hasNoEndComponents);
+                        solver->setHasNoEndComponents(hasNoEndComponents);
 
                         if (solverRequirementsData.initialScheduler) {
                             solver->setInitialScheduler(std::move(solverRequirementsData.initialScheduler.get()));
@@ -251,7 +251,7 @@ namespace storm {
                         solver->solveEquations(env, dir, x, explicitRepresentation.second);
                         
                         // If we included some target and non-filter states in the ODD, we need to expand the result from the solver.
-                        if (requirements.noEndComponents() && solverRequirementsData.ecInformation) {
+                        if (requirements.uniqueSolution() && solverRequirementsData.ecInformation) {
                             std::vector<ValueType> extendedVector(solverRequirementsData.properMaybeStates.getNumberOfSetBits());
                             solverRequirementsData.ecInformation.get().setValues(extendedVector, solverRequirementsData.properMaybeStates, x);
                             x = std::move(extendedVector);
@@ -543,17 +543,20 @@ namespace storm {
                     // If there are maybe states, we need to solve an equation system.
                     if (!maybeStates.isZero()) {
                         // If we maximize, we know that the solution to the equation system is unique.
-                        bool uniqueSolution = dir == storm::solver::OptimizationDirection::Maximize;
+                        bool hasNoEndComponents = dir == storm::solver::OptimizationDirection::Maximize;
+                        bool hasUniqueSolution = hasNoEndComponents;
                         // Check for requirements of the solver this early so we can adapt the maybe states accordingly.
                         storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> linearEquationSolverFactory;
-                        storm::solver::MinMaxLinearEquationSolverRequirements requirements = linearEquationSolverFactory.getRequirements(env, uniqueSolution, dir);
+                        storm::solver::MinMaxLinearEquationSolverRequirements requirements = linearEquationSolverFactory.getRequirements(env, hasUniqueSolution, hasNoEndComponents, dir);
                         storm::solver::MinMaxLinearEquationSolverRequirements clearedRequirements = requirements;
                         bool extendMaybeStates = false;
                         if (clearedRequirements.hasEnabledRequirement()) {
-                            if (clearedRequirements.noEndComponents()) {
+                            if (clearedRequirements.uniqueSolution()) {
                                 STORM_LOG_DEBUG("Scheduling EC elimination, because the solver requires it.");
                                 extendMaybeStates = true;
-                                clearedRequirements.clearNoEndComponents();
+                                clearedRequirements.clearUniqueSolution();
+                                hasUniqueSolution = true;
+                                // There might still be end components in which reward is collected.
                             }
                             if (clearedRequirements.validInitialScheduler()) {
                                 STORM_LOG_DEBUG("Computing valid scheduler, because the solver requires it.");
@@ -611,11 +614,15 @@ namespace storm {
                             storm::storage::BitVector targetStates = computeTargetStatesForReachabilityRewardsFromExplicitRepresentation(explicitRepresentation.first);
                             solverRequirementsData.properMaybeStates = ~targetStates;
 
-                            if (requirements.noEndComponents()) {
+                            if (requirements.uniqueSolution()) {
+                                STORM_LOG_THROW(!requirements.validInitialScheduler(), storm::exceptions::UncheckedRequirementException, "The underlying solver requires a unique solution and an initial valid scheduler. This is currently not supported for expected reward properties.");
+                                // Eliminate the end components with reward 0.
+                                // Note that this may also compute the oneStepTargetProbabilities if upper bounds are required.
                                 eliminateEndComponentsAndTargetStatesReachabilityRewards(explicitRepresentation, solverRequirementsData, targetStates, requirements.upperBounds());
                                 // The solution becomes unique after end components have been eliminated.
-                                uniqueSolution = true;
-                            } else {
+                                hasUniqueSolution = true;
+                            }
+                            else {
                                 if (requirements.validInitialScheduler()) {
                                     // Compute a valid initial scheduler.
                                     solverRequirementsData.initialScheduler = computeValidInitialSchedulerForReachabilityRewards<ValueType>(explicitRepresentation.first, solverRequirementsData.properMaybeStates, targetStates);
@@ -637,8 +644,9 @@ namespace storm {
                         // Now solve the resulting equation system.
                         std::unique_ptr<storm::solver::MinMaxLinearEquationSolver<ValueType>> solver = linearEquationSolverFactory.create(env);
                         
-                        // Set whether the equation system will have a unique solution
-                        solver->setHasUniqueSolution(uniqueSolution);
+                        // Set whether the equation system will have a unique solution / no end components
+                        solver->setHasUniqueSolution(hasUniqueSolution);
+                        solver->setHasNoEndComponents(hasNoEndComponents);
                         
                         // If the solver requires upper bounds, compute them now.
                         if (requirements.upperBounds()) {
@@ -657,7 +665,7 @@ namespace storm {
                         solver->solveEquations(env, dir, x, explicitRepresentation.second);
 
                         // If we eliminated end components, we need to extend the solution vector.
-                        if (requirements.noEndComponents() && solverRequirementsData.ecInformation) {
+                        if (requirements.uniqueSolution() && solverRequirementsData.ecInformation) {
                             std::vector<ValueType> extendedVector(solverRequirementsData.properMaybeStates.getNumberOfSetBits());
                             solverRequirementsData.ecInformation.get().setValues(extendedVector, solverRequirementsData.properMaybeStates, x);
                             x = std::move(extendedVector);
diff --git a/src/storm/modelchecker/prctl/helper/SparseMdpPrctlHelper.cpp b/src/storm/modelchecker/prctl/helper/SparseMdpPrctlHelper.cpp
index d8bb7dc32..dcadd10f8 100644
--- a/src/storm/modelchecker/prctl/helper/SparseMdpPrctlHelper.cpp
+++ b/src/storm/modelchecker/prctl/helper/SparseMdpPrctlHelper.cpp
@@ -205,7 +205,7 @@ namespace storm {
             
             template<typename ValueType>
             struct SparseMdpHintType {
-                SparseMdpHintType() : eliminateEndComponents(false), computeUpperBounds(false), uniqueSolution(false) {
+                SparseMdpHintType() : eliminateEndComponents(false), computeUpperBounds(false), uniqueSolution(false), noEndComponents(false) {
                     // Intentionally left empty.
                 }
                 
@@ -265,6 +265,10 @@ namespace storm {
                     return uniqueSolution;
                 }
                 
+                bool hasNoEndComponents() const {
+                    return noEndComponents;
+                }
+                
                 boost::optional<std::vector<uint64_t>> schedulerHint;
                 boost::optional<std::vector<ValueType>> valueHint;
                 boost::optional<ValueType> lowerResultBound;
@@ -273,6 +277,7 @@ namespace storm {
                 bool eliminateEndComponents;
                 bool computeUpperBounds;
                 bool uniqueSolution;
+                bool noEndComponents;
             };
             
             template<typename ValueType>
@@ -329,29 +334,36 @@ namespace storm {
             SparseMdpHintType<ValueType> computeHints(Environment const& env, SolutionType const& type, ModelCheckerHint const& hint, storm::OptimizationDirection const& dir, storm::storage::SparseMatrix<ValueType> const& transitionMatrix, storm::storage::SparseMatrix<ValueType> const& backwardTransitions, storm::storage::BitVector const& maybeStates, storm::storage::BitVector const& phiStates, storm::storage::BitVector const& targetStates, bool produceScheduler, boost::optional<storm::storage::BitVector> const& selectedChoices = boost::none) {
                 SparseMdpHintType<ValueType> result;
 
-                // The solution to the min-max equation system is unique if we minimize until probabilities or
-                // maximize reachability rewards or if the hint tells us that there are no end-compontnes.
-                result.uniqueSolution = (dir == storm::solver::OptimizationDirection::Minimize && type == SolutionType::UntilProbabilities)
+                // There are no end components if we minimize until probabilities or
+                // maximize reachability rewards or if the hint tells us so.
+                result.noEndComponents = (dir == storm::solver::OptimizationDirection::Minimize && type == SolutionType::UntilProbabilities)
                                       || (dir == storm::solver::OptimizationDirection::Maximize && type == SolutionType::ExpectedRewards)
                                       || (hint.isExplicitModelCheckerHint() && hint.asExplicitModelCheckerHint<ValueType>().getNoEndComponentsInMaybeStates());
                 
+                // If there are no end components, the solution is unique. (Note that the converse does not hold:
+                // e.g., there may be end components in which infinite reward is collected.)
+                result.uniqueSolution = result.hasNoEndComponents();
+                
                 // Check for requirements of the solver.
                 bool hasSchedulerHint = hint.isExplicitModelCheckerHint() && hint.template asExplicitModelCheckerHint<ValueType>().hasSchedulerHint();
                 storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
-                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, result.uniqueSolution, dir, hasSchedulerHint, produceScheduler);
+                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(env, result.uniqueSolution, result.noEndComponents, dir, hasSchedulerHint, produceScheduler);
                 if (requirements.hasEnabledRequirement()) {
                     // If the solver still requires no end-components, we have to eliminate them later.
-                    if (requirements.noEndComponents()) {
+                    if (requirements.uniqueSolution()) {
                         STORM_LOG_ASSERT(!result.hasUniqueSolution(), "The solver requires to eliminate the end components although the solution is already assumed to be unique.");
-                        STORM_LOG_DEBUG("Scheduling EC elimination, because the solver requires it.");
+                        STORM_LOG_DEBUG("Scheduling EC elimination, because the solver requires a unique solution.");
                         result.eliminateEndComponents = true;
                         // If end components have been eliminated we can assume a unique solution.
                         result.uniqueSolution = true;
-                        requirements.clearNoEndComponents();
+                        requirements.clearUniqueSolution();
+                        // If we compute until probabilities, we can even assume the absence of end components.
+                        // Note that in the case of minimizing expected rewards there might still be end components in which reward is collected.
+                        result.noEndComponents = (type == SolutionType::UntilProbabilities);
                     }
                     
-                    // If the solver requires an initial scheduler, compute one now.
-                    if (requirements.validInitialScheduler()) {
+                    // If the solver requires an initial scheduler, compute one now. Note that any scheduler is valid if there are no end components.
+                    if (requirements.validInitialScheduler() && !result.noEndComponents) {
                         STORM_LOG_DEBUG("Computing valid scheduler, because the solver requires it.");
                         result.schedulerHint = computeValidSchedulerHint(env, type, transitionMatrix, backwardTransitions, maybeStates, phiStates, targetStates);
                         requirements.clearValidInitialScheduler();
@@ -429,6 +441,7 @@ namespace storm {
                 std::unique_ptr<storm::solver::MinMaxLinearEquationSolver<ValueType>> solver = storm::solver::configureMinMaxLinearEquationSolver(env, std::move(goal), minMaxLinearEquationSolverFactory, std::move(submatrix));
                 solver->setRequirementsChecked();
                 solver->setHasUniqueSolution(hint.hasUniqueSolution());
+                solver->setHasNoEndComponents(hint.hasNoEndComponents());
                 if (hint.hasLowerResultBound()) {
                     solver->setLowerBound(hint.getLowerResultBound());
                 }
@@ -1348,7 +1361,7 @@ namespace storm {
                 
                 // Check for requirements of the solver.
                 storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
-                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(underlyingSolverEnvironment, true, goal.direction());
+                storm::solver::MinMaxLinearEquationSolverRequirements requirements = minMaxLinearEquationSolverFactory.getRequirements(underlyingSolverEnvironment, true, true, goal.direction());
                 requirements.clearBounds();
                 STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
 
@@ -1359,6 +1372,7 @@ namespace storm {
                 solver->setLowerBound(storm::utility::zero<ValueType>());
                 solver->setUpperBound(*std::max_element(lraValuesForEndComponents.begin(), lraValuesForEndComponents.end()));
                 solver->setHasUniqueSolution();
+                solver->setHasNoEndComponents();
                 solver->setRequirementsChecked();
                 solver->solveEquations(underlyingSolverEnvironment, sspResult, b);
                 
diff --git a/src/storm/modelchecker/prctl/helper/SymbolicMdpPrctlHelper.cpp b/src/storm/modelchecker/prctl/helper/SymbolicMdpPrctlHelper.cpp
index f52d7eb17..212151e5e 100644
--- a/src/storm/modelchecker/prctl/helper/SymbolicMdpPrctlHelper.cpp
+++ b/src/storm/modelchecker/prctl/helper/SymbolicMdpPrctlHelper.cpp
@@ -82,10 +82,10 @@ namespace storm {
                         requirements.clearValidInitialScheduler();
                     }
                     requirements.clearBounds();
-                    if (requirements.noEndComponents()) {
+                    if (requirements.uniqueSolution()) {
                         // Check whether there are end components
                         if (storm::utility::graph::performProb0E(model, transitionMatrix.notZero(), maybeStates, !maybeStates && model.getReachableStates()).isZero()) {
-                            requirements.clearNoEndComponents();
+                            requirements.clearUniqueSolution();
                         }
                     }
                     STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
@@ -253,10 +253,10 @@ namespace storm {
                         requirements.clearValidInitialScheduler();
                     }
                     requirements.clearLowerBounds();
-                    if (requirements.noEndComponents()) {
+                    if (requirements.uniqueSolution()) {
                         // Check whether there are end components
                         if (storm::utility::graph::performProb0E(model, transitionMatrixBdd, maybeStates, !maybeStates && model.getReachableStates()).isZero()) {
-                            requirements.clearNoEndComponents();
+                            requirements.clearUniqueSolution();
                         }
                     }
                     STORM_LOG_THROW(!requirements.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + requirements.getEnabledRequirementsAsString() + " not checked.");
diff --git a/src/storm/modelchecker/prctl/helper/rewardbounded/EpochModel.cpp b/src/storm/modelchecker/prctl/helper/rewardbounded/EpochModel.cpp
index e8b059f32..f48253fca 100644
--- a/src/storm/modelchecker/prctl/helper/rewardbounded/EpochModel.cpp
+++ b/src/storm/modelchecker/prctl/helper/rewardbounded/EpochModel.cpp
@@ -140,6 +140,7 @@ namespace storm {
                         storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType> minMaxLinearEquationSolverFactory;
                         minMaxSolver = minMaxLinearEquationSolverFactory.create(env, epochModel.epochMatrix);
                         minMaxSolver->setHasUniqueSolution();
+                        minMaxSolver->setHasNoEndComponents();
                         minMaxSolver->setOptimizationDirection(dir);
                         minMaxSolver->setCachingEnabled(true);
                         minMaxSolver->setTrackScheduler(true);
diff --git a/src/storm/modelchecker/results/ParetoCurveCheckResult.cpp b/src/storm/modelchecker/results/ParetoCurveCheckResult.cpp
index ffc34528a..c1cb62f32 100644
--- a/src/storm/modelchecker/results/ParetoCurveCheckResult.cpp
+++ b/src/storm/modelchecker/results/ParetoCurveCheckResult.cpp
@@ -65,13 +65,25 @@ namespace storm {
             out << points.size() << " Pareto optimal points found:" << std::endl;
             for(auto const& p : points) {
                 out << "   (";
-                for(auto it = p.begin(); it != p.end(); ++it){
-                    if(it != p.begin()){
+                for (auto it = p.begin(); it != p.end(); ++it){
+                    if (it != p.begin()){
                         out << ", ";
                     }
-                    out << std::setw(10) << *it;
+                    out << std::setw(storm::NumberTraits<ValueType>::IsExact ? 20 : 11) << *it;
                 }
-                out << " )" << std::endl;
+                out << " )";
+                if (storm::NumberTraits<ValueType>::IsExact) {
+                    out << " approx. ";
+                    out << "   (";
+                    for (auto it = p.begin(); it != p.end(); ++it) {
+                        if(it != p.begin()){
+                            out << ", ";
+                        }
+                        out << std::setw(11) << storm::utility::convertNumber<double>(*it);
+                    }
+                    out << " )";
+                }
+                out << std::endl;
             }
             return out;
         }
diff --git a/src/storm/solver/GurobiLpSolver.cpp b/src/storm/solver/GurobiLpSolver.cpp
index cd556c936..7056b5093 100644
--- a/src/storm/solver/GurobiLpSolver.cpp
+++ b/src/storm/solver/GurobiLpSolver.cpp
@@ -86,6 +86,9 @@ namespace storm {
             // Enable the following line to force Gurobi to be as precise about the binary variables as required by the given precision option.
             error = GRBsetdblparam(env, "IntFeasTol", storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance());
             STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to set Gurobi Parameter IntFeasTol (" << GRBgeterrormsg(env) << ", error code " << error << ").");
+            
+            // error = GRBsetintparam(env, "NumericFocus", 3);
+            // STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to set Gurobi Parameter NumericFocus (" << GRBgeterrormsg(env) << ", error code " << error << ").");
         }
         
         template<typename ValueType>
@@ -342,9 +345,9 @@ namespace storm {
             double value = 0;
             int error = GRBgetdblattrelement(model, GRB_DBL_ATTR_X, variableIndexPair->second, &value);
             STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to get Gurobi solution (" << GRBgeterrormsg(env) << ", error code " << error << ").");
-            STORM_LOG_THROW(std::abs(static_cast<int>(value) - value) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+            STORM_LOG_THROW(std::abs(std::round(value) - value) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
             
-            return static_cast<int_fast64_t>(value);
+            return static_cast<int_fast64_t>(std::round(value));
         }
         
         template<typename ValueType>
@@ -363,12 +366,12 @@ namespace storm {
             STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to get Gurobi solution (" << GRBgeterrormsg(env) << ", error code " << error << ").");
 
             if (value > 0.5) {
-                STORM_LOG_THROW(std::abs(static_cast<int>(value) - 1) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                STORM_LOG_THROW(std::abs(value - 1.0) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                return true;
             } else {
-                STORM_LOG_THROW(value <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                STORM_LOG_THROW(std::abs(value) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                return false;
             }
-            
-            return static_cast<bool>(value);
         }
         
         template<typename ValueType>
@@ -496,9 +499,9 @@ namespace storm {
             STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to set Gurobi solution index (" << GRBgeterrormsg(env) << ", error code " << error << ").");
             error = GRBgetdblattrelement(model, GRB_DBL_ATTR_Xn, variableIndexPair->second, &value);
             STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to get Gurobi solution (" << GRBgeterrormsg(env) << ", error code " << error << ").");
-            STORM_LOG_THROW(std::abs(static_cast<int>(value) - value) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+            STORM_LOG_THROW(std::abs(std::round(value) - value) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
             
-            return static_cast<int_fast64_t>(value);
+            return static_cast<int_fast64_t>(std::round(value));
         }
         
         template<typename ValueType>
@@ -520,12 +523,12 @@ namespace storm {
             STORM_LOG_THROW(error == 0, storm::exceptions::InvalidStateException, "Unable to get Gurobi solution (" << GRBgeterrormsg(env) << ", error code " << error << ").");
 
             if (value > 0.5) {
-                STORM_LOG_THROW(std::abs(static_cast<int>(value) - 1) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                STORM_LOG_THROW(std::abs(value - 1) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                return true;
             } else {
-                STORM_LOG_THROW(value <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                STORM_LOG_THROW(std::abs(value) <= storm::settings::getModule<storm::settings::modules::GurobiSettings>().getIntegerTolerance(), storm::exceptions::InvalidStateException, "Illegal value for integer variable in Gurobi solution (" << value << ").");
+                return false;
             }
-            
-            return static_cast<bool>(value);
         }
         
         template<typename ValueType>
diff --git a/src/storm/solver/IterativeMinMaxLinearEquationSolver.cpp b/src/storm/solver/IterativeMinMaxLinearEquationSolver.cpp
index becaa1d17..37f9c1ab5 100644
--- a/src/storm/solver/IterativeMinMaxLinearEquationSolver.cpp
+++ b/src/storm/solver/IterativeMinMaxLinearEquationSolver.cpp
@@ -241,7 +241,7 @@ namespace storm {
                 if (!this->hasUniqueSolution()) { // Traditional value iteration has no requirements if the solution is unique.
                     // Computing a scheduler is only possible if the solution is unique
                     if (this->isTrackSchedulerSet()) {
-                        requirements.requireNoEndComponents();
+                        requirements.requireUniqueSolution();
                     } else {
                         // As we want the smallest (largest) solution for maximizing (minimizing) equation systems, we have to approach the solution from below (above).
                         if (!direction || direction.get() == OptimizationDirection::Maximize) {
@@ -255,7 +255,7 @@ namespace storm {
             } else if (method == MinMaxMethod::IntervalIteration) {
                 // Interval iteration requires a unique solution and lower+upper bounds
                 if (!this->hasUniqueSolution()) {
-                    requirements.requireNoEndComponents();
+                    requirements.requireUniqueSolution();
                 }
                 requirements.requireBounds();
             } else if (method == MinMaxMethod::RationalSearch) {
@@ -263,22 +263,22 @@ namespace storm {
                 requirements.requireLowerBounds();
                 // The solution needs to be unique in case of minimizing or in cases where we want a scheduler.
                 if (!this->hasUniqueSolution() && (!direction || direction.get() == OptimizationDirection::Minimize || this->isTrackSchedulerSet())) {
-                    requirements.requireNoEndComponents();
+                    requirements.requireUniqueSolution();
                 }
             } else if (method == MinMaxMethod::PolicyIteration) {
-                if (!this->hasUniqueSolution()) {
+                // The initial scheduler shall not select an end component.
+                if (!this->hasNoEndComponents()) {
                     requirements.requireValidInitialScheduler();
                 }
             } else if (method == MinMaxMethod::SoundValueIteration) {
                 if (!this->hasUniqueSolution()) {
-                    requirements.requireNoEndComponents();
+                    requirements.requireUniqueSolution();
                 }
                 requirements.requireBounds(false);
             } else if (method == MinMaxMethod::ViToPi) {
-                // Since we want to use value iteration to extract an initial scheduler, it helps to eliminate all end components first.
-                // TODO: We might get around this, as the initial value iteration scheduler is only a heuristic.
+                // Since we want to use value iteration to extract an initial scheduler, the solution has to be unique.
                 if (!this->hasUniqueSolution()) {
-                    requirements.requireNoEndComponents();
+                    requirements.requireUniqueSolution();
                 }
             } else {
                 STORM_LOG_THROW(false, storm::exceptions::InvalidEnvironmentException, "Unsupported technique for iterative MinMax linear equation solver.");
diff --git a/src/storm/solver/LpMinMaxLinearEquationSolver.cpp b/src/storm/solver/LpMinMaxLinearEquationSolver.cpp
index 78bc75752..2b371aae3 100644
--- a/src/storm/solver/LpMinMaxLinearEquationSolver.cpp
+++ b/src/storm/solver/LpMinMaxLinearEquationSolver.cpp
@@ -117,7 +117,7 @@ namespace storm {
             
             // In case we need to retrieve a scheduler, the solution has to be unique
             if (!this->hasUniqueSolution() && this->isTrackSchedulerSet()) {
-                requirements.requireNoEndComponents();
+                requirements.requireUniqueSolution();
             }
             
             requirements.requireBounds(false);
diff --git a/src/storm/solver/MinMaxLinearEquationSolver.cpp b/src/storm/solver/MinMaxLinearEquationSolver.cpp
index be724ea0d..df0fef614 100644
--- a/src/storm/solver/MinMaxLinearEquationSolver.cpp
+++ b/src/storm/solver/MinMaxLinearEquationSolver.cpp
@@ -19,7 +19,7 @@ namespace storm {
     namespace solver {
         
         template<typename ValueType>
-        MinMaxLinearEquationSolver<ValueType>::MinMaxLinearEquationSolver(OptimizationDirectionSetting direction) : direction(direction), trackScheduler(false), uniqueSolution(false), cachingEnabled(false), requirementsChecked(false) {
+        MinMaxLinearEquationSolver<ValueType>::MinMaxLinearEquationSolver(OptimizationDirectionSetting direction) : direction(direction), trackScheduler(false), uniqueSolution(false), noEndComponents(false), cachingEnabled(false), requirementsChecked(false) {
             // Intentionally left empty.
         }
         
@@ -57,7 +57,17 @@ namespace storm {
         
         template<typename ValueType>
         bool MinMaxLinearEquationSolver<ValueType>::hasUniqueSolution() const {
-            return uniqueSolution;
+            return uniqueSolution || noEndComponents;
+        }
+        
+        template<typename ValueType>
+        void MinMaxLinearEquationSolver<ValueType>::setHasNoEndComponents(bool value) {
+            noEndComponents = value;
+        }
+        
+        template<typename ValueType>
+        bool MinMaxLinearEquationSolver<ValueType>::hasNoEndComponents() const {
+            return noEndComponents;
         }
         
         template<typename ValueType>
@@ -161,11 +171,12 @@ namespace storm {
         }
 
         template<typename ValueType>
-        MinMaxLinearEquationSolverRequirements MinMaxLinearEquationSolverFactory<ValueType>::getRequirements(Environment const& env, bool hasUniqueSolution, boost::optional<storm::solver::OptimizationDirection> const& direction, bool hasInitialScheduler, bool trackScheduler) const {
+        MinMaxLinearEquationSolverRequirements MinMaxLinearEquationSolverFactory<ValueType>::getRequirements(Environment const& env, bool hasUniqueSolution, bool hasNoEndComponents, boost::optional<storm::solver::OptimizationDirection> const& direction, bool hasInitialScheduler, bool trackScheduler) const {
             // Create dummy solver and ask it for requirements.
             std::unique_ptr<MinMaxLinearEquationSolver<ValueType>> solver = this->create(env);
             solver->setTrackScheduler(trackScheduler);
             solver->setHasUniqueSolution(hasUniqueSolution);
+            solver->setHasNoEndComponents(hasNoEndComponents);
             return solver->getRequirements(env, direction, hasInitialScheduler);
         }
         
diff --git a/src/storm/solver/MinMaxLinearEquationSolver.h b/src/storm/solver/MinMaxLinearEquationSolver.h
index 7ca89dfb9..5ab51dc95 100644
--- a/src/storm/solver/MinMaxLinearEquationSolver.h
+++ b/src/storm/solver/MinMaxLinearEquationSolver.h
@@ -76,10 +76,24 @@ namespace storm {
             void setHasUniqueSolution(bool value = true);
             
             /*!
-             * Retrieves whether the solution to the min max equation system is assumed to be unique
+             * Retrieves whether the solution to the min max equation system is assumed to be unique.
+             * Note that having no end components implies that the solution is unique. Thus, this also returns true if
+             * `hasNoEndComponents()` returns true.
+             * Also note that a unique solution does not imply the absence of ECs, because, e.g. in Rmin properties there
+             * can still be ECs in which infinite reward is collected.
              */
             bool hasUniqueSolution() const;
             
+            /*!
+             * Sets whether the min max equation system is known to not have any end components
+             */
+            void setHasNoEndComponents(bool value = true);
+            
+            /*!
+             * Retrieves whether the min max equation system is known to not have any end components
+             */
+            bool hasNoEndComponents() const;
+            
             /*!
              * Sets whether schedulers are generated when solving equation systems. If the argument is false, the currently
              * stored scheduler (if any) is deleted.
@@ -173,6 +187,9 @@ namespace storm {
             /// Whether the solver can assume that the min-max equation system has a unique solution
             bool uniqueSolution;
             
+            /// Whether the solver can assume that the min-max equation system has no end components
+            bool noEndComponents;
+            
             /// Whether some of the generated data during solver calls should be cached.
             bool cachingEnabled;
             
@@ -194,7 +211,7 @@ namespace storm {
              * Retrieves the requirements of the solver that would be created when calling create() right now. The
              * requirements are guaranteed to be ordered according to their appearance in the SolverRequirement type.
              */
-            MinMaxLinearEquationSolverRequirements getRequirements(Environment const& env, bool hasUniqueSolution = false, boost::optional<storm::solver::OptimizationDirection> const& direction = boost::none, bool hasInitialScheduler = false, bool trackScheduler = false) const;
+            MinMaxLinearEquationSolverRequirements getRequirements(Environment const& env, bool hasUniqueSolution = false, bool hasNoEndComponents = false, boost::optional<storm::solver::OptimizationDirection> const& direction = boost::none, bool hasInitialScheduler = false, bool trackScheduler = false) const;
             void setRequirementsChecked(bool value = true);
             bool isRequirementsCheckedSet() const;
 
diff --git a/src/storm/solver/MinMaxLinearEquationSolverRequirements.cpp b/src/storm/solver/MinMaxLinearEquationSolverRequirements.cpp
index e16bbdf54..3e5e867ab 100644
--- a/src/storm/solver/MinMaxLinearEquationSolverRequirements.cpp
+++ b/src/storm/solver/MinMaxLinearEquationSolverRequirements.cpp
@@ -7,8 +7,8 @@ namespace storm {
             // Intentionally left empty.
         }
         
-        MinMaxLinearEquationSolverRequirements& MinMaxLinearEquationSolverRequirements::requireNoEndComponents(bool critical) {
-            noEndComponentsRequirement.enable(critical);
+        MinMaxLinearEquationSolverRequirements& MinMaxLinearEquationSolverRequirements::requireUniqueSolution(bool critical) {
+            uniqueSolutionRequirement.enable(critical);
             return *this;
         }
         
@@ -33,8 +33,8 @@ namespace storm {
             return *this;
         }
         
-        SolverRequirement const&  MinMaxLinearEquationSolverRequirements::noEndComponents() const {
-            return noEndComponentsRequirement;
+        SolverRequirement const&  MinMaxLinearEquationSolverRequirements::uniqueSolution() const {
+            return uniqueSolutionRequirement;
         }
         
         SolverRequirement const&  MinMaxLinearEquationSolverRequirements::validInitialScheduler() const {
@@ -51,16 +51,15 @@ namespace storm {
         
         SolverRequirement const&  MinMaxLinearEquationSolverRequirements::get(Element const& element) const {
             switch (element) {
-                case Element::NoEndComponents: return noEndComponents(); break;
+                case Element::UniqueSolution: return uniqueSolution(); break;
                 case Element::ValidInitialScheduler: return validInitialScheduler(); break;
                 case Element::LowerBounds: return lowerBounds(); break;
                 case Element::UpperBounds: return upperBounds(); break;
             }
         }
         
-        void MinMaxLinearEquationSolverRequirements::clearNoEndComponents() {
-            noEndComponentsRequirement.clear();
-            validInitialSchedulerRequirement.clear();
+        void MinMaxLinearEquationSolverRequirements::clearUniqueSolution() {
+            uniqueSolutionRequirement.clear();
         }
         
         void MinMaxLinearEquationSolverRequirements::clearValidInitialScheduler() {
@@ -81,20 +80,20 @@ namespace storm {
         }
         
         bool MinMaxLinearEquationSolverRequirements::hasEnabledRequirement() const {
-            return noEndComponentsRequirement || validInitialSchedulerRequirement || lowerBoundsRequirement || upperBoundsRequirement;
+            return uniqueSolutionRequirement || validInitialSchedulerRequirement || lowerBoundsRequirement || upperBoundsRequirement;
         }
         
         bool MinMaxLinearEquationSolverRequirements::hasEnabledCriticalRequirement() const {
-            return noEndComponentsRequirement.isCritical() || validInitialSchedulerRequirement.isCritical() || lowerBoundsRequirement.isCritical() || upperBoundsRequirement.isCritical();
+            return uniqueSolutionRequirement.isCritical() || validInitialSchedulerRequirement.isCritical() || lowerBoundsRequirement.isCritical() || upperBoundsRequirement.isCritical();
         }
         
         std::string MinMaxLinearEquationSolverRequirements::getEnabledRequirementsAsString() const {
             std::string res = "[";
             bool first = true;
-            if (noEndComponents()) {
+            if (uniqueSolution()) {
                 if (!first) { res += ", "; } else {first = false;}
-                res += "NoEndComponents";
-                if (noEndComponents().isCritical()) {
+                res += "UniqueSolution";
+                if (uniqueSolution().isCritical()) {
                     res += "(mandatory)";
                 }
             }
diff --git a/src/storm/solver/MinMaxLinearEquationSolverRequirements.h b/src/storm/solver/MinMaxLinearEquationSolverRequirements.h
index 06ba623c1..38f2f1849 100644
--- a/src/storm/solver/MinMaxLinearEquationSolverRequirements.h
+++ b/src/storm/solver/MinMaxLinearEquationSolverRequirements.h
@@ -15,7 +15,7 @@ namespace storm {
                 // Requirements that are related to the graph structure of the system. Note that the requirements in this
                 // category are to be interpreted incrementally in the following sense: whenever the system has no end
                 // components then automatically both requirements are fulfilled.
-                NoEndComponents,
+                UniqueSolution,
                 ValidInitialScheduler,
                 
                 // Requirements that are related to bounds for the actual solution.
@@ -27,19 +27,19 @@ namespace storm {
             
             MinMaxLinearEquationSolverRequirements(LinearEquationSolverRequirements const& linearEquationSolverRequirements = LinearEquationSolverRequirements());
             
-            MinMaxLinearEquationSolverRequirements& requireNoEndComponents(bool critical = true);
+            MinMaxLinearEquationSolverRequirements& requireUniqueSolution(bool critical = true);
             MinMaxLinearEquationSolverRequirements& requireValidInitialScheduler(bool critical = true);
             MinMaxLinearEquationSolverRequirements& requireLowerBounds(bool critical = true);
             MinMaxLinearEquationSolverRequirements& requireUpperBounds(bool critical = true);
             MinMaxLinearEquationSolverRequirements& requireBounds(bool critical = true);
 
-            SolverRequirement const& noEndComponents() const;
+            SolverRequirement const& uniqueSolution() const;
             SolverRequirement const& validInitialScheduler() const;
             SolverRequirement const& lowerBounds() const;
             SolverRequirement const& upperBounds() const;
             SolverRequirement const& get(Element const& element) const;
             
-            void clearNoEndComponents();
+            void clearUniqueSolution();
             void clearValidInitialScheduler();
             void clearLowerBounds();
             void clearUpperBounds();
@@ -54,7 +54,7 @@ namespace storm {
             std::string getEnabledRequirementsAsString() const;
             
         private:
-            SolverRequirement noEndComponentsRequirement;
+            SolverRequirement uniqueSolutionRequirement;
             SolverRequirement validInitialSchedulerRequirement;
             SolverRequirement lowerBoundsRequirement;
             SolverRequirement upperBoundsRequirement;
diff --git a/src/storm/solver/SmtSolver.h b/src/storm/solver/SmtSolver.h
index 6d842500c..27a506ff0 100644
--- a/src/storm/solver/SmtSolver.h
+++ b/src/storm/solver/SmtSolver.h
@@ -50,7 +50,7 @@ namespace storm {
                 storm::expressions::ExpressionManager const& getManager() const;
                 
             private:
-                // The expression manager responsible for the variableswhose value can be requested via this model
+                // The expression manager responsible for the variables whose value can be requested via this model
                 // reference.
                 storm::expressions::ExpressionManager const& manager;
 			};
diff --git a/src/storm/solver/SymbolicMinMaxLinearEquationSolver.cpp b/src/storm/solver/SymbolicMinMaxLinearEquationSolver.cpp
index c54df3acf..2ba997ed8 100644
--- a/src/storm/solver/SymbolicMinMaxLinearEquationSolver.cpp
+++ b/src/storm/solver/SymbolicMinMaxLinearEquationSolver.cpp
@@ -455,7 +455,7 @@ namespace storm {
             } else if (method == MinMaxMethod::RationalSearch) {
                 requirements.requireLowerBounds();
                 if (!this->hasUniqueSolution() && (!direction || direction.get() == storm::solver::OptimizationDirection::Minimize)) {
-                    requirements.requireNoEndComponents();
+                    requirements.requireUniqueSolution();
                 }
             } else {
                 STORM_LOG_THROW(false, storm::exceptions::InvalidEnvironmentException, "The selected min max technique is not supported by this solver.");
diff --git a/src/storm/solver/TopologicalMinMaxLinearEquationSolver.cpp b/src/storm/solver/TopologicalMinMaxLinearEquationSolver.cpp
index 998c9d782..d98902694 100644
--- a/src/storm/solver/TopologicalMinMaxLinearEquationSolver.cpp
+++ b/src/storm/solver/TopologicalMinMaxLinearEquationSolver.cpp
@@ -181,6 +181,7 @@ namespace storm {
             }
             this->sccSolver->setMatrix(*this->A);
             this->sccSolver->setHasUniqueSolution(this->hasUniqueSolution());
+            this->sccSolver->setHasNoEndComponents(this->hasNoEndComponents());
             this->sccSolver->setBoundsFromOtherSolver(*this);
             this->sccSolver->setTrackScheduler(this->isTrackSchedulerSet());
             if (this->hasInitialScheduler()) {
@@ -194,10 +195,12 @@ namespace storm {
             if (req.lowerBounds() && this->hasLowerBound()) {
                 req.clearLowerBounds();
             }
-            
-            // If all requirements of the underlying solver have been passed as requirements to the calling site, we can
-            // assume that the system has no end components if the underlying solver requires this.
-            req.clearNoEndComponents();
+            if (req.validInitialScheduler() && this->hasInitialScheduler()) {
+                req.clearValidInitialScheduler();
+            }
+            if (req.uniqueSolution() && this->hasUniqueSolution()) {
+                req.clearUniqueSolution();
+            }
             STORM_LOG_THROW(!req.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + req.getEnabledRequirementsAsString() + " not checked.");
             this->sccSolver->setRequirementsChecked(true);
             
@@ -217,6 +220,7 @@ namespace storm {
                 this->sccSolver->setCachingEnabled(true);
             }
             this->sccSolver->setHasUniqueSolution(this->hasUniqueSolution());
+            this->sccSolver->setHasNoEndComponents(this->hasNoEndComponents());
             this->sccSolver->setTrackScheduler(this->isTrackSchedulerSet());
             
             // SCC Matrix
@@ -269,6 +273,9 @@ namespace storm {
             if (req.validInitialScheduler() && this->hasInitialScheduler()) {
                 req.clearValidInitialScheduler();
             }
+            if (req.uniqueSolution() && this->hasUniqueSolution()) {
+                req.clearUniqueSolution();
+            }
             STORM_LOG_THROW(!req.hasEnabledCriticalRequirement(), storm::exceptions::UncheckedRequirementException, "Solver requirements " + req.getEnabledRequirementsAsString() + " not checked.");
             this->sccSolver->setRequirementsChecked(true);
 
@@ -291,7 +298,7 @@ namespace storm {
         template<typename ValueType>
         MinMaxLinearEquationSolverRequirements TopologicalMinMaxLinearEquationSolver<ValueType>::getRequirements(Environment const& env, boost::optional<storm::solver::OptimizationDirection> const& direction, bool const& hasInitialScheduler) const {
             // Return the requirements of the underlying solver
-            return GeneralMinMaxLinearEquationSolverFactory<ValueType>().getRequirements(getEnvironmentForUnderlyingSolver(env), this->hasUniqueSolution(), direction, hasInitialScheduler);
+            return GeneralMinMaxLinearEquationSolverFactory<ValueType>().getRequirements(getEnvironmentForUnderlyingSolver(env), this->hasUniqueSolution(), this->hasNoEndComponents(), direction, hasInitialScheduler, this->isTrackSchedulerSet());
         }
         
         template<typename ValueType>
diff --git a/src/storm/storage/Qvbs.cpp b/src/storm/storage/Qvbs.cpp
index b9d392386..cb810c948 100644
--- a/src/storm/storage/Qvbs.cpp
+++ b/src/storm/storage/Qvbs.cpp
@@ -97,7 +97,8 @@ namespace storm {
                     }
                 } else {
                     constantDefinitions.push_back("");
-                    janiFiles.push_back(janiFileName);
+                    janiFiles.push_back(modelPath + "/" + janiFileName);
+                    instanceInfos.push_back(janiFileName);
                 }
             }
         }
diff --git a/src/storm/storage/expressions/UnaryNumericalFunctionExpression.cpp b/src/storm/storage/expressions/UnaryNumericalFunctionExpression.cpp
index 1b4b5d0da..d9fbaae5a 100644
--- a/src/storm/storage/expressions/UnaryNumericalFunctionExpression.cpp
+++ b/src/storm/storage/expressions/UnaryNumericalFunctionExpression.cpp
@@ -71,10 +71,13 @@ namespace storm {
                 if (operandSimplified->hasIntegerType()) {
                     int_fast64_t value = operandSimplified->evaluateAsInt();
                     switch (this->getOperatorType()) {
-                        case OperatorType::Minus: value = -value; break;
+                        case OperatorType::Minus:
+                            value = -value;
+                            break;
                         // Nothing to be done for the other cases:
-                        // case OperatorType::Floor:
-                        // case OperatorType::Ceil:
+                        case OperatorType::Floor:
+                        case OperatorType::Ceil:
+                            break;
                     }
                     return std::shared_ptr<BaseExpression>(new IntegerLiteralExpression(this->getManager(), value));
                 } else {
diff --git a/src/storm/storage/jani/Model.cpp b/src/storm/storage/jani/Model.cpp
index 3aa05a20c..9d61a10cc 100644
--- a/src/storm/storage/jani/Model.cpp
+++ b/src/storm/storage/jani/Model.cpp
@@ -789,10 +789,19 @@ namespace storm {
             return *expressionManager;
         }
         
+        bool Model::hasNonTrivialRewardExpression() const {
+            return !nonTrivialRewardModels.empty();
+        }
+        
+        bool Model::isNonTrivialRewardModelExpression(std::string const& identifier) const {
+            return nonTrivialRewardModels.count(identifier) > 0;
+        }
+        
         bool Model::addNonTrivialRewardExpression(std::string const& identifier, storm::expressions::Expression const& rewardExpression) {
-            if (nonTrivialRewardModels.count(identifier) > 0) {
+            if (isNonTrivialRewardModelExpression(identifier)) {
                 return false;
             } else {
+                STORM_LOG_THROW(!globalVariables.hasVariable(identifier) || !globalVariables.getVariable(identifier).isTransient(), storm::exceptions::InvalidArgumentException, "Non trivial reward expression with identifier '" << identifier << "' clashes with global transient variable of the same name.");
                 nonTrivialRewardModels.emplace(identifier, rewardExpression);
                 return true;
             }
diff --git a/src/storm/storage/jani/Model.h b/src/storm/storage/jani/Model.h
index 5db1551d5..c96611940 100644
--- a/src/storm/storage/jani/Model.h
+++ b/src/storm/storage/jani/Model.h
@@ -285,9 +285,19 @@ namespace storm {
              * Retrieves the manager responsible for the expressions in the JANI model.
              */
             storm::expressions::ExpressionManager& getExpressionManager() const;
-
+            
+            /*!
+             * Returns true iff there is a non-trivial reward model, i.e., a reward model that does not consist of a single, global, numerical, transient variable.
+             */
+            bool hasNonTrivialRewardExpression() const;
+            
+            /*!
+             * Returns true iff the given identifier corresponds to a non-trivial reward expression i.e., a reward model that does not consist of a single, global, numerical, transient variable.
+             */
+            bool isNonTrivialRewardModelExpression(std::string const& identifier) const;
+            
             /*!
-             * Adds a (non-trivial) reward model, i.e., a reward model that does not consist of a single, global, numerical variable.
+             * Adds a  reward expression, i.e., a reward model that does not consist of a single, global, numerical, transient variable.
              * @return true if a new reward model was added and false if a reward model with this identifier is already present in the model (in which case no reward model is added)
              */
             bool addNonTrivialRewardExpression(std::string const& identifier, storm::expressions::Expression const& rewardExpression);
@@ -577,7 +587,7 @@ namespace storm {
             bool undefinedConstantsAreGraphPreserving() const;
             
             /*!
-             * Lifts the common edge destination assignments to edge assignments.
+             * Lifts the common edge destination assignments of transient variables to edge assignments.
              * @param maxLevel the maximum level of assignments that are to be lifted.
              */
             void liftTransientEdgeDestinationAssignments(int64_t maxLevel = 0);
diff --git a/src/storm/storm.cpp b/src/storm/storm.cpp
index 3cba6dd3d..a2d4aa0db 100644
--- a/src/storm/storm.cpp
+++ b/src/storm/storm.cpp
@@ -1,3 +1,12 @@
+/*! \mainpage Storm - A Modern Probabilistic Model Checker
+ *
+ * This document contains the Doxygen documentation of the Storm source code.
+ *
+ * \section more_info More information
+ * For more information, installation guides and tutorials on how to use Storm, visit the Storm website: http://www.stormchecker.org.
+ */
+
+
 #include "storm/utility/macros.h"
 #include "storm/exceptions/BaseException.h"
 
diff --git a/src/test/storm-dft/api/DftModelCheckerTest.cpp b/src/test/storm-dft/api/DftModelCheckerTest.cpp
index 2600252a5..e011eb12c 100644
--- a/src/test/storm-dft/api/DftModelCheckerTest.cpp
+++ b/src/test/storm-dft/api/DftModelCheckerTest.cpp
@@ -11,6 +11,7 @@ namespace {
         bool useSR;
         bool useMod;
         bool useDC;
+        bool allowDCForRelevantEvents;
     };
 
     class NoOptimizationsConfig {
@@ -18,7 +19,7 @@ namespace {
         typedef double ValueType;
 
         static DftAnalysisConfig createConfig() {
-            return DftAnalysisConfig{false, false, false};
+            return DftAnalysisConfig{false, false, false, true};
         }
     };
 
@@ -27,7 +28,7 @@ namespace {
         typedef double ValueType;
 
         static DftAnalysisConfig createConfig() {
-            return DftAnalysisConfig{false, false, true};
+            return DftAnalysisConfig{false, false, true, true};
         }
     };
 
@@ -36,7 +37,7 @@ namespace {
         typedef double ValueType;
 
         static DftAnalysisConfig createConfig() {
-            return DftAnalysisConfig{false, true, false};
+            return DftAnalysisConfig{false, true, false, true};
         }
     };
 
@@ -45,7 +46,7 @@ namespace {
         typedef double ValueType;
 
         static DftAnalysisConfig createConfig() {
-            return DftAnalysisConfig{true, false, false};
+            return DftAnalysisConfig{true, false, false, true};
         }
     };
 
@@ -54,7 +55,7 @@ namespace {
         typedef double ValueType;
 
         static DftAnalysisConfig createConfig() {
-            return DftAnalysisConfig{true, true, true};
+            return DftAnalysisConfig{true, true, true, true};
         }
     };
 
@@ -77,11 +78,26 @@ namespace {
             std::string property = "Tmin=? [F \"failed\"]";
             std::vector<std::shared_ptr<storm::logic::Formula const>> properties = storm::api::extractFormulasFromProperties(storm::api::parseProperties(property));
             std::set<size_t> relevantEvents;
-            if (config.useDC) {
+            if (!config.useDC) {
                 relevantEvents = dft->getAllIds();
             }
             typename storm::modelchecker::DFTModelChecker<double>::dft_results results = storm::api::analyzeDFT<double>(*dft, properties, config.useSR, config.useMod,
-                                                                                                                        relevantEvents, true);
+                                                                                                                        relevantEvents, config.allowDCForRelevantEvents);
+            return boost::get<double>(results[0]);
+        }
+
+        double analyzeReliability(std::string const& file, double bound) {
+            std::shared_ptr<storm::storage::DFT<double>> dft = storm::api::loadDFTGalileoFile<double>(file);
+            EXPECT_TRUE(storm::api::isWellFormed(*dft));
+            std::string property = "Pmin=? [F<=" + std::to_string(bound) + " \"failed\"]";
+            std::vector<std::shared_ptr<storm::logic::Formula const>> properties = storm::api::extractFormulasFromProperties(
+                    storm::api::parseProperties(property));
+            std::set<size_t> relevantEvents;
+            if (!config.useDC) {
+                relevantEvents = dft->getAllIds();
+            }
+            typename storm::modelchecker::DFTModelChecker<double>::dft_results results = storm::api::analyzeDFT<double>(*dft, properties, config.useSR, config.useMod,
+                                                                                                                        relevantEvents, config.allowDCForRelevantEvents);
             return boost::get<double>(results[0]);
         }
 
@@ -205,4 +221,15 @@ namespace {
         EXPECT_FLOAT_EQ(result, storm::utility::infinity<double>());
     }
 
+    TYPED_TEST(DftModelCheckerTest, Symmetry) {
+        double result = this->analyzeMTTF(STORM_TEST_RESOURCES_DIR "/dft/symmetry6.dft");
+        EXPECT_FLOAT_EQ(result, 1.373226284);
+        result = this->analyzeReliability(STORM_TEST_RESOURCES_DIR "/dft/symmetry6.dft", 1.0);
+        EXPECT_FLOAT_EQ(result, 0.3421934224);
+    }
+
+    TYPED_TEST(DftModelCheckerTest, HecsReliability) {
+        double result = this->analyzeReliability(STORM_TEST_RESOURCES_DIR "/dft/hecs_2_2.dft", 1.0);
+        EXPECT_FLOAT_EQ(result, 0.00021997582);
+    }
 }
diff --git a/src/test/storm-dft/api/DftParserTest.cpp b/src/test/storm-dft/api/DftParserTest.cpp
index 4a19aed08..dfa7ad769 100644
--- a/src/test/storm-dft/api/DftParserTest.cpp
+++ b/src/test/storm-dft/api/DftParserTest.cpp
@@ -20,4 +20,14 @@ namespace {
         EXPECT_EQ(2ul, dft->nrBasicElements());
         EXPECT_TRUE(storm::api::isWellFormed(*dft));
     }
+
+    TEST(DftParserTest, CatchCycles) {
+        std::string file = STORM_TEST_RESOURCES_DIR "/dft/cyclic.dft";
+        EXPECT_THROW(storm::api::loadDFTGalileoFile<double>(file), storm::exceptions::WrongFormatException);
+    }
+
+    TEST(DftParserTest, CatchSeqChildren) {
+        std::string file = STORM_TEST_RESOURCES_DIR "/dft/seqChild.dft";
+        EXPECT_THROW(storm::api::loadDFTGalileoFile<double>(file), storm::exceptions::WrongFormatException);
+    }
 }
diff --git a/src/test/storm-dft/api/DftSmtTest.cpp b/src/test/storm-dft/api/DftSmtTest.cpp
new file mode 100644
index 000000000..95bcad150
--- /dev/null
+++ b/src/test/storm-dft/api/DftSmtTest.cpp
@@ -0,0 +1,59 @@
+#include "gtest/gtest.h"
+#include "storm-config.h"
+
+#include "storm-dft/api/storm-dft.h"
+
+namespace {
+    TEST(DftSmtTest, AndTest) {
+        std::shared_ptr<storm::storage::DFT<double>> dft =
+                storm::api::loadDFTGalileoFile<double>(STORM_TEST_RESOURCES_DIR "/dft/and.dft");
+        EXPECT_TRUE(storm::api::isWellFormed(*dft));
+        storm::modelchecker::DFTASFChecker smtChecker(*dft);
+        smtChecker.convert();
+        smtChecker.toSolver();
+        EXPECT_EQ(smtChecker.checkTleNeverFailed(), storm::solver::SmtSolver::CheckResult::Unsat);
+    }
+
+    TEST(DftSmtTest, PandTest) {
+        std::shared_ptr<storm::storage::DFT<double>> dft =
+                storm::api::loadDFTGalileoFile<double>(STORM_TEST_RESOURCES_DIR "/dft/pand.dft");
+        EXPECT_TRUE(storm::api::isWellFormed(*dft));
+        storm::modelchecker::DFTASFChecker smtChecker(*dft);
+        smtChecker.convert();
+        smtChecker.toSolver();
+        EXPECT_EQ(smtChecker.checkTleNeverFailed(), storm::solver::SmtSolver::CheckResult::Sat);
+    }
+
+    TEST(DftSmtTest, SpareTest) {
+        std::shared_ptr<storm::storage::DFT<double>> dft =
+                storm::api::loadDFTGalileoFile<double>(STORM_TEST_RESOURCES_DIR "/dft/spare_two_modules.dft");
+        EXPECT_TRUE(storm::api::isWellFormed(*dft));
+        storm::modelchecker::DFTASFChecker smtChecker(*dft);
+        smtChecker.convert();
+        smtChecker.toSolver();
+        EXPECT_EQ(smtChecker.checkTleFailsWithLeq(2), storm::solver::SmtSolver::CheckResult::Unsat);
+        EXPECT_EQ(smtChecker.checkTleFailsWithEq(3), storm::solver::SmtSolver::CheckResult::Sat);
+    }
+
+    TEST(DftSmtTest, BoundTest) {
+        std::shared_ptr<storm::storage::DFT<double>> dft =
+                storm::api::loadDFTGalileoFile<double>(STORM_TEST_RESOURCES_DIR "/dft/spare5.dft");
+        EXPECT_TRUE(storm::api::isWellFormed(*dft));
+        storm::modelchecker::DFTASFChecker smtChecker(*dft);
+        smtChecker.convert();
+        smtChecker.toSolver();
+        EXPECT_EQ(smtChecker.getLeastFailureBound(30), uint64_t(2));
+        EXPECT_EQ(smtChecker.getAlwaysFailedBound(30), uint64_t(4));
+    }
+
+    TEST(DftSmtTest, FDEPBoundTest) {
+        std::shared_ptr<storm::storage::DFT<double>> dft =
+                storm::api::loadDFTGalileoFile<double>(STORM_TEST_RESOURCES_DIR "/dft/fdep_bound.dft");
+        EXPECT_TRUE(storm::api::isWellFormed(*dft));
+        storm::modelchecker::DFTASFChecker smtChecker(*dft);
+        smtChecker.convert();
+        smtChecker.toSolver();
+        EXPECT_EQ(smtChecker.getLeastFailureBound(30), uint64_t(1));
+        EXPECT_EQ(smtChecker.getAlwaysFailedBound(30), uint64_t(5));
+    }
+}
\ No newline at end of file
diff --git a/src/test/storm/solver/MinMaxLinearEquationSolverTest.cpp b/src/test/storm/solver/MinMaxLinearEquationSolverTest.cpp
index 725590bc8..ef565f946 100644
--- a/src/test/storm/solver/MinMaxLinearEquationSolverTest.cpp
+++ b/src/test/storm/solver/MinMaxLinearEquationSolverTest.cpp
@@ -149,6 +149,7 @@ namespace {
         auto factory = storm::solver::GeneralMinMaxLinearEquationSolverFactory<ValueType>();
         auto solver = factory.create(this->env(), A);
         solver->setHasUniqueSolution(true);
+        solver->setHasNoEndComponents(true);
         solver->setBounds(this->parseNumber("0"), this->parseNumber("2"));
         storm::solver::MinMaxLinearEquationSolverRequirements req = solver->getRequirements(this->env());
         req.clearBounds();
diff --git a/src/test/storm/transformer/EndComponentEliminatorTest.cpp b/src/test/storm/transformer/EndComponentEliminatorTest.cpp
index 972f4d812..a9c989ea4 100644
--- a/src/test/storm/transformer/EndComponentEliminatorTest.cpp
+++ b/src/test/storm/transformer/EndComponentEliminatorTest.cpp
@@ -7,26 +7,26 @@ TEST(NeutralECRemover, SimpleModelTest) {
     
     
     storm::storage::SparseMatrixBuilder<double> builder(12, 5, 19, true, true, 5);
-    ASSERT_NO_THROW(builder.newRowGroup(0));
+    ASSERT_NO_THROW(builder.newRowGroup(0)); // Transitions for state 0:
     ASSERT_NO_THROW(builder.addNextValue(0, 0, 1.0));
     ASSERT_NO_THROW(builder.addNextValue(1, 1, 0.3));
     ASSERT_NO_THROW(builder.addNextValue(1, 2, 0.1));
     ASSERT_NO_THROW(builder.addNextValue(1, 3, 0.4));
     ASSERT_NO_THROW(builder.addNextValue(1, 4, 0.2));
-    ASSERT_NO_THROW(builder.newRowGroup(2));
+    ASSERT_NO_THROW(builder.newRowGroup(2)); // Transitions for state 1:
     ASSERT_NO_THROW(builder.addNextValue(2, 1, 0.7));
     ASSERT_NO_THROW(builder.addNextValue(2, 3, 0.3));
     ASSERT_NO_THROW(builder.addNextValue(3, 1, 0.1));
     ASSERT_NO_THROW(builder.addNextValue(3, 4, 0.9));
     ASSERT_NO_THROW(builder.addNextValue(4, 1, 0.2));
     ASSERT_NO_THROW(builder.addNextValue(4, 4, 0.8));
-    ASSERT_NO_THROW(builder.newRowGroup(5));
+    ASSERT_NO_THROW(builder.newRowGroup(5)); // Transitions for state 2:
     ASSERT_NO_THROW(builder.addNextValue(5, 2, 1.0));
-    ASSERT_NO_THROW(builder.newRowGroup(6));
+    ASSERT_NO_THROW(builder.newRowGroup(6)); // Transitions for state 3:
     ASSERT_NO_THROW(builder.addNextValue(6, 1, 1.0));
     ASSERT_NO_THROW(builder.addNextValue(7, 2, 1.0));
     ASSERT_NO_THROW(builder.addNextValue(8, 3, 1.0));
-    ASSERT_NO_THROW(builder.newRowGroup(9));
+    ASSERT_NO_THROW(builder.newRowGroup(9)); // Transitions for state 4:
     ASSERT_NO_THROW(builder.addNextValue(9, 4, 1.0));
     ASSERT_NO_THROW(builder.addNextValue(10, 1, 0.4));
     ASSERT_NO_THROW(builder.addNextValue(10, 4, 0.6));
@@ -48,28 +48,31 @@ TEST(NeutralECRemover, SimpleModelTest) {
     allowEmptyRows.set(1, false);
     allowEmptyRows.set(4, false);
     
-    
     auto res = storm::transformer::EndComponentEliminator<double>::transform(matrix, subsystem, possibleEcRows, allowEmptyRows);
     
     // Expected data
+    // State 0 is a singleton EC that is replaced by state 2
+    // States 1,4 build an EC that will be eliminated and replaced by state 1.
+    // State 2 is not part of the subsystem and thus disregarded
+    // State 3 is the only state that is kept as it is (except for the transition to 2) and will now be represented by state 0
     storm::storage::SparseMatrixBuilder<double> expectedBuilder(8, 3, 8, true, true, 3);
     ASSERT_NO_THROW(expectedBuilder.newRowGroup(0));
-    ASSERT_NO_THROW(expectedBuilder.addNextValue(0, 2, 1.0));
+    ASSERT_NO_THROW(expectedBuilder.addNextValue(0, 1, 1.0));
     ASSERT_NO_THROW(expectedBuilder.addNextValue(2, 0, 1.0));
     ASSERT_NO_THROW(expectedBuilder.newRowGroup(3));
-    ASSERT_NO_THROW(expectedBuilder.addNextValue(3, 0, 0.4));
-    ASSERT_NO_THROW(expectedBuilder.addNextValue(3, 2, 0.5));
-    ASSERT_NO_THROW(expectedBuilder.newRowGroup(5));
+    ASSERT_NO_THROW(expectedBuilder.addNextValue(3, 0, 0.3));
+    ASSERT_NO_THROW(expectedBuilder.addNextValue(3, 1, 0.7));
+    ASSERT_NO_THROW(expectedBuilder.addNextValue(4, 1, 1.0));
     ASSERT_NO_THROW(expectedBuilder.addNextValue(5, 0, 1.0));
-    ASSERT_NO_THROW(expectedBuilder.addNextValue(6, 0, 0.3));
-    ASSERT_NO_THROW(expectedBuilder.addNextValue(6, 2, 0.7));
-    ASSERT_NO_THROW(expectedBuilder.addNextValue(7, 2, 1.0));
+    ASSERT_NO_THROW(expectedBuilder.newRowGroup(6));
+    ASSERT_NO_THROW(expectedBuilder.addNextValue(6, 0, 0.4));
+    ASSERT_NO_THROW(expectedBuilder.addNextValue(6, 1, 0.5));
     storm::storage::SparseMatrix<double> expectedMatrix;
     ASSERT_NO_THROW(expectedMatrix = expectedBuilder.build());
     
-    std::vector<uint_fast64_t> expectedNewToOldRowMapping = {6,7,8,1,0,11,2,3};
+    std::vector<uint_fast64_t> expectedNewToOldRowMapping = {6,7,8,2,3,11,1,0};
     
-    std::vector<uint_fast64_t> expectedOldToNewStateMapping = {1,2,std::numeric_limits<uint_fast64_t>::max(), 0, 2};
+    std::vector<uint_fast64_t> expectedOldToNewStateMapping = {2,1,std::numeric_limits<uint_fast64_t>::max(), 0, 1};
     
     // Note that there are other possible solutions that yield equivalent matrices / vectors.
     // In particular, the ordering within the row groups depends on the MEC decomposition implementation.
diff --git a/travis/build.sh b/travis/build.sh
index 2873a6ec7..b96e3df0f 100755
--- a/travis/build.sh
+++ b/travis/build.sh
@@ -37,6 +37,11 @@ linux)
         docker run -d -it --name storm --privileged movesrwth/storm-basesystem:$LINUX
         ;;
     esac
+    # Install doxygen if necessary
+    if [[ "$TASK" == *Doxygen* ]]
+    then
+        docker exec storm apt-get install -qq -y doxygen graphviz
+    fi
     # Copy local content into container
     docker exec storm mkdir /opt/storm
     docker cp . storm:/opt/storm
@@ -45,6 +50,7 @@ linux)
     # Execute main process
     docker exec storm bash -c "
         export CONFIG=$CONFIG;
+        export TASK=$TASK;
         export COMPILER=$COMPILER;
         export N_JOBS=$N_JOBS;
         export STLARG=;
@@ -57,7 +63,8 @@ linux)
 osx)
     # Mac OSX
     STLARG="-stdlib=libc++"
-    export CONFIG=$CONFIG
+    export CONFIG
+    export TASK
     export COMPILER
     export N_JOBS
     export STLARG
diff --git a/travis/build_helper.sh b/travis/build_helper.sh
index f44b2feb2..662b66640 100755
--- a/travis/build_helper.sh
+++ b/travis/build_helper.sh
@@ -53,27 +53,44 @@ run() {
     fi
     ;;
 
-  TestAll)
-    # Test all
-    travis_fold start test_all
-    cd build
-    ctest test --output-on-failure
-    travis_fold end test_all
-
-    # Check correctness of build types
-    echo "Checking correctness of build types"
-    case "$CONFIG" in
-    DefaultDebug*)
-        ./bin/storm --version | grep "with flags .* -g" || (echo "Error: Missing flag '-g' for debug build." && return 1)
-        ;;
-    DefaultRelease*)
-        ./bin/storm --version | grep "with flags .* -O3" || (echo "Error: Missing flag '-O3' for release build." && return 1)
-        ./bin/storm --version | grep "with flags .* -DNDEBUG" || (echo "Error: Missing flag '-DNDEBUG' for release build." && return 1)
-        ;;
-    *)
-        echo "Unrecognized value of CONFIG: $CONFIG"; exit 1
-        ;;
-    esac
+  Tasks)
+    # Perform tasks
+    if [[ "$TASK" == *Test* ]]
+    then
+        # Test all
+        travis_fold start test_all
+        cd build
+        ctest test --output-on-failure
+        travis_fold end test_all
+
+        # Check correctness of build types
+        echo "Checking correctness of build types"
+        case "$CONFIG" in
+        DefaultDebug*)
+            ./bin/storm --version | grep "with flags .* -g" || (echo "Error: Missing flag '-g' for debug build." && return 1)
+            ;;
+        DefaultRelease*)
+            ./bin/storm --version | grep "with flags .* -O3" || (echo "Error: Missing flag '-O3' for release build." && return 1)
+            ./bin/storm --version | grep "with flags .* -DNDEBUG" || (echo "Error: Missing flag '-DNDEBUG' for release build." && return 1)
+            ;;
+        *)
+            echo "Unrecognized value of CONFIG: $CONFIG"
+            exit 1
+        esac
+        cd ..
+    fi
+
+    if [[ "$TASK" == *Doxygen* ]]
+    then
+        # Generate doxygen doc
+        travis_fold start make_doc
+        cd build
+        make -j$N_JOBS doc
+        # Disable jekyll as otherwise files with starting underscore are not published
+        echo "" > doc/html/.nojekyll
+        cd ..
+        travis_fold end make_doc
+    fi
     ;;
 
   *)
diff --git a/travis/deploy_carl.sh b/travis/deploy_docker.sh
similarity index 73%
rename from travis/deploy_carl.sh
rename to travis/deploy_docker.sh
index 787e7fcec..c0fe17823 100755
--- a/travis/deploy_carl.sh
+++ b/travis/deploy_docker.sh
@@ -15,18 +15,20 @@ if [ "${TRAVIS_PULL_REQUEST}" != "false" ]; then
     exit 0;
 fi
 
+echo "Deploying $1 to Dockerhub"
+
 case $OS in
 linux)
     echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
     # Deploy as debug/release
     case "$CONFIG" in
     *DebugTravis)
-        docker commit carl movesrwth/carl:travis-debug
-        docker push movesrwth/carl:travis-debug
+        docker commit $1 movesrwth/$1:travis-debug
+        docker push movesrwth/$1:travis-debug
         ;;
     *ReleaseTravis)
-        docker commit carl movesrwth/carl:travis
-        docker push movesrwth/carl:travis
+        docker commit $1 movesrwth/$1:travis
+        docker push movesrwth/$1:travis
         ;;
     *)
         echo "Unrecognized value of CONFIG: $CONFIG"; exit 1
@@ -35,7 +37,7 @@ linux)
     ;;
 
 osx)
-    echo "Building carl on Mac OSX not used."
+    echo "Docker deployment on Mac OSX not used."
     exit 1
     ;;
 
@@ -44,4 +46,3 @@ osx)
     echo "Unsupported OS: $OS"
     exit 1
 esac
-
diff --git a/travis/deploy_storm.sh b/travis/deploy_storm.sh
deleted file mode 100755
index e1c41a3cd..000000000
--- a/travis/deploy_storm.sh
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/bin/bash -x
-
-set -e
-
-OS=$TRAVIS_OS_NAME
-
-# Do not deploy if credentials are not given
-if [ "${TRAVIS_SECURE_ENV_VARS}" == "false" ]; then
-    echo "WARNING: Not deploying as no credentials are given."
-    exit 0;
-fi
-
-# Do not deploy for pull requests
-if [ "${TRAVIS_PULL_REQUEST}" != "false" ]; then
-    exit 0;
-fi
-
-case $OS in
-linux)
-    echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
-    # Deploy as debug/release
-    case "$CONFIG" in
-    *DebugTravis)
-        docker commit storm movesrwth/storm:travis-debug
-        docker push movesrwth/storm:travis-debug
-        ;;
-    *ReleaseTravis)
-        docker commit storm movesrwth/storm:travis
-        docker push movesrwth/storm:travis
-        ;;
-    *)
-        echo "Unrecognized value of CONFIG: $CONFIG"; exit 1
-        ;;
-    esac
-    ;;
-
-osx)
-    echo "Building Storm on Mac OSX not used."
-    exit 1
-    ;;
-
-*)
-    # Unknown OS
-    echo "Unsupported OS: $OS"
-    exit 1
-esac
-
diff --git a/travis/generate_travis.py b/travis/generate_travis.py
index 3ca733d24..4493a3686 100644
--- a/travis/generate_travis.py
+++ b/travis/generate_travis.py
@@ -1,38 +1,37 @@
 # Generate .travis.yml automatically
 
 # Configuration for Linux
-configs_linux = [
-    # OS, compiler, build type
-    ("ubuntu-18.04", "gcc", "DefaultDebug"),
-    ("ubuntu-18.04", "gcc", "DefaultRelease"),
-    ("debian-9", "gcc", "DefaultDebug"),
-    ("debian-9", "gcc", "DefaultRelease"),
-    ("ubuntu-18.10", "gcc", "DefaultDebug"),
-    ("ubuntu-18.10", "gcc", "DefaultRelease"),
-    ("ubuntu-19.04", "gcc", "DefaultDebugTravis"),
-    ("ubuntu-19.04", "gcc", "DefaultReleaseTravis"),
-]
-
-# Configurations for Mac
-configs_mac = [
-    # OS, compiler, build type
-#    ("osx", "clang", "DefaultDebug"),
-#    ("osx", "clang", "DefaultRelease"),
+configs = [
+    # OS, OS version, compiler, build type, task
+    ("ubuntu", "18.04", "gcc", "DefaultDebug", "Test"),
+    ("ubuntu", "18.04", "gcc", "DefaultRelease", "Test"),
+    ("debian", "9", "gcc", "DefaultDebug", "Test"),
+    ("debian", "9", "gcc", "DefaultRelease", "Test"),
+    ("ubuntu", "18.10", "gcc", "DefaultDebug", "Test"),
+    ("ubuntu", "18.10", "gcc", "DefaultRelease", "Test"),
+    ("ubuntu", "19.04", "gcc", "DefaultDebugTravis", "TestDocker"),
+    ("ubuntu", "19.04", "gcc", "DefaultReleaseTravis", "TestDockerDoxygen"),
+#    ("osx", "xcode9.3", "clang", "DefaultDebug", "Test"),
+#    ("osx", "xcode9.3", "clang", "DefaultRelease", "Test"),
 ]
 
 # Stages in travis
-stages = [
+build_stages = [
     ("Build (1st run)", "Build1"),
     ("Build (2nd run)", "Build2"),
     ("Build (3rd run)", "Build3"),
     ("Build (4th run)", "BuildLast"),
-    ("Test all", "TestAll"),
+    ("Tasks", "Tasks"),
 ]
 
+def get_env_string(os, os_version, compiler, build_type, task):
+    if os == "osx":
+        return "CONFIG={} TASK={} COMPILER={} STL=libc++\n".format(build_type, task, compiler)
+    else:
+        return "CONFIG={} TASK={} LINUX={} COMPILER={}\n".format(build_type, task, "{}-{}".format(os, os_version), compiler)
 
-if __name__ == "__main__":
-    allow_failures = []
 
+if __name__ == "__main__":
     s = ""
     # Initial config
     s += "#\n"
@@ -43,7 +42,6 @@ if __name__ == "__main__":
     s += "  - master\n"
     s += "  - stable\n"
     s += "sudo: required\n"
-    s += "dist: trusty\n"
     s += "language: cpp\n"
     s += "\n"
     s += "git:\n"
@@ -79,97 +77,80 @@ if __name__ == "__main__":
     s += "    ###\n"
     s += "    # Stage: Build Carl\n"
     s += "    ###\n"
-    s += "\n"
-    for config in configs_linux:
-        linux = config[0]
-        compiler = config[1]
-        build_type = config[2]
+    for config in configs:
+        os, os_version, compiler, build_type, task = config
+        os_type = "osx" if os == "osx" else "linux"
         if "Travis" in build_type:
-            s += "    # {} - {}\n".format(linux, build_type)
+            s += "    # {}-{} - {}\n".format(os, os_version, build_type)
             buildConfig = ""
             buildConfig += "    - stage: Build Carl\n"
-            buildConfig += "      os: linux\n"
+            buildConfig += "      os: {}\n".format(os_type)
             buildConfig += "      compiler: {}\n".format(compiler)
-            buildConfig += "      env: CONFIG={} LINUX={} COMPILER={}\n".format(build_type, linux, compiler)
-            buildConfig += "      install:\n"
-            buildConfig += "        - travis/install_linux.sh\n"
+            buildConfig += "      env: {}".format(get_env_string(os, os_version, compiler, build_type, task))
             buildConfig += "      before_script:\n"
             buildConfig += '        - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode\n'
             buildConfig += "      script:\n"
             buildConfig += "        - travis/build_carl.sh\n"
+            buildConfig += "      before_cache:\n"
+            buildConfig += "        - docker cp carl:/opt/carl/. .\n"
             # Upload to DockerHub
-            buildConfig += "      after_success:\n"
-            buildConfig += "        - travis/deploy_carl.sh\n"
+            buildConfig += "      deploy:\n"
+            buildConfig += "        - provider: script\n"
+            buildConfig += "          skip_cleanup: true\n"
+            buildConfig += "          script: bash travis/deploy_docker.sh carl\n"
             s += buildConfig
 
-    # Generate all configurations
-    for stage in stages:
+    # Generate all build configurations
+    for stage in build_stages:
         s += "\n"
         s += "    ###\n"
         s += "    # Stage: {}\n".format(stage[0])
         s += "    ###\n"
-        s += "\n"
-        # Mac OS X
-        for config in configs_mac:
-            osx = config[0]
-            compiler = config[1]
-            build_type = config[2]
-            s += "    # {} - {}\n".format(osx, build_type)
+        for config in configs:
+            os, os_version, compiler, build_type, task = config
+            os_type = "osx" if os == "osx" else "linux"
+            s += "    # {}-{} - {}\n".format(os, os_version, build_type)
             buildConfig = ""
             buildConfig += "    - stage: {}\n".format(stage[0])
-            buildConfig += "      os: osx\n"
-            buildConfig += "      osx_image: xcode9.1\n"
+            buildConfig += "      os: {}\n".format(os_type)
+            if os_type == "osx":
+                buildConfig += "      osx_image: {}\n".format(os_version)
             buildConfig += "      compiler: {}\n".format(compiler)
-            buildConfig += "      env: CONFIG={} COMPILER={} STL=libc++\n".format(build_type, compiler)
+            buildConfig += "      env: {}".format(get_env_string(os, os_version, compiler, build_type, task))
             buildConfig += "      install:\n"
             if stage[1] == "Build1":
                 buildConfig += "        - rm -rf build\n"
-            buildConfig += "        - travis/install_osx.sh\n"
+            buildConfig += "        - travis/skip_test.sh\n"
+            if os_type == "osx":
+                buildConfig += "        - travis/install_osx.sh\n"
             buildConfig += "      before_script:\n"
             buildConfig += '        - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode\n'
             buildConfig += "      script:\n"
             buildConfig += "        - travis/build.sh {}\n".format(stage[1])
+            if os_type == "linux":
+                buildConfig += "      before_cache:\n"
+                buildConfig += "        - docker cp storm:/opt/storm/. .\n"
             buildConfig += "      after_failure:\n"
             buildConfig += "        - find build -iname '*err*.log' -type f -print -exec cat {} \;\n"
-            s += buildConfig
 
-        # Linux via Docker
-        for config in configs_linux:
-            allow_fail = ""
-            linux = config[0]
-            compiler = config[1]
-            build_type = config[2]
-            s += "    # {} - {}\n".format(linux, build_type)
-            buildConfig = ""
-            buildConfig += "    - stage: {}\n".format(stage[0])
-            allow_fail += "    - stage: {}\n".format(stage[0])
-            buildConfig += "      os: linux\n"
-            allow_fail += "      os: linux\n"
-            buildConfig += "      compiler: {}\n".format(compiler)
-            buildConfig += "      env: CONFIG={} LINUX={} COMPILER={}\n".format(build_type, linux, compiler)
-            allow_fail += "      env: CONFIG={} LINUX={} COMPILER={}\n".format(build_type, linux, compiler)
-            buildConfig += "      install:\n"
-            if stage[1] == "Build1":
-                buildConfig += "        - rm -rf build\n"
-            buildConfig += "        - travis/install_linux.sh\n"
-            buildConfig += "      before_script:\n"
-            buildConfig += '        - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" # Workaround for nonblocking mode\n'
-            buildConfig += "      script:\n"
-            buildConfig += "        - travis/build.sh {}\n".format(stage[1])
-            buildConfig += "      before_cache:\n"
-            buildConfig += "        - docker cp storm:/opt/storm/. .\n"
-            buildConfig += "      after_failure:\n"
-            buildConfig += "        - find build -iname '*err*.log' -type f -print -exec cat {} \;\n"
-            # Upload to DockerHub
-            if stage[1] == "TestAll" and "Travis" in build_type:
-                buildConfig += "      after_success:\n"
-                buildConfig += "        - travis/deploy_storm.sh\n"
+            # Deployment
+            if stage[1] == "Tasks":
+                if "Docker" in task or "Doxygen" in task:
+                    buildConfig += "      deploy:\n"
+                if "Docker" in task:
+                    buildConfig += "        - provider: script\n"
+                    buildConfig += "          skip_cleanup: true\n"
+                    buildConfig += "          script: bash travis/deploy_docker.sh storm\n"
+                if "Doxygen" in task:
+                    buildConfig += "        - provider: pages\n"
+                    buildConfig += "          skip_cleanup: true\n"
+                    buildConfig += "          github_token: $GITHUB_TOKEN\n"
+                    buildConfig += "          local_dir: build/doc/html/\n"
+                    buildConfig += "          repo: moves-rwth/storm-doc\n"
+                    buildConfig += "          target_branch: master\n"
+                    buildConfig += "          on:\n"
+                    buildConfig += "            branch: master\n"
+
             s += buildConfig
-            if "Travis" in build_type and "Release" in build_type:
-                allow_failures.append(allow_fail)
 
-    if len(allow_failures) > 0:
-        s += "  allow_failures:\n"
-        for fail in allow_failures:
-            s += fail
     print(s)
diff --git a/travis/mtime_cache/globs.txt b/travis/mtime_cache/globs.txt
index a09cba27a..07312cb3d 100644
--- a/travis/mtime_cache/globs.txt
+++ b/travis/mtime_cache/globs.txt
@@ -1,5 +1,8 @@
 src/**/*.{%{cpp}}
+src/**/*.{in}
 src/**/CMakeLists.txt
 CMakeLists.txt
+*.{in}
 resources/3rdparty/**/*.{%{cpp}}
 resources/3rdparty/eigen-3.3-beta1/StormEigen/**/*
+resources/3rdparty/eigen-3.3-beta1/unsupported/**/*
diff --git a/travis/install_linux.sh b/travis/skip_test.sh
similarity index 60%
rename from travis/install_linux.sh
rename to travis/skip_test.sh
index 62a7d75f6..416850102 100755
--- a/travis/install_linux.sh
+++ b/travis/skip_test.sh
@@ -1,11 +1,6 @@
 #!/bin/bash
-
-set -e
-
 # Skip this run?
 if [ -f build/skip.txt ]
 then
   exit 0
 fi
-
-#sudo apt-get install -qq -y docker